[ 594.264619] env[63024]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=63024) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 594.264949] env[63024]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=63024) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 594.264991] env[63024]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=63024) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 594.265347] env[63024]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 594.359655] env[63024]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=63024) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:383}}
[ 594.369902] env[63024]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=63024) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:421}}
[ 594.976046] env[63024]: INFO nova.virt.driver [None req-9942e2ec-f004-4d36-a3b1-d9ff40c008ee None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 595.046792] env[63024]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 595.046961] env[63024]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 595.047076] env[63024]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=63024) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 598.227061] env[63024]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-9f7c4db4-079c-4683-9655-87242cf674d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.243512] env[63024]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=63024) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 598.243625] env[63024]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-17ae6ea8-ff65-4ec0-826f-6b84614d7443 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.278821] env[63024]: INFO oslo_vmware.api [-] Successfully established new session; session ID is a053c.
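The session establishment recorded above (SessionManager.Login issued through oslo.vmware while holding "oslo_vmware_api_lock") corresponds roughly to the following minimal sketch of the public oslo.vmware interface; the credentials and retry/poll settings are placeholders, not values taken from this log.

    # Minimal sketch, assuming the public oslo.vmware API; the host is the one
    # seen in the log above, credentials and intervals are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        host='vc1.osci.c.eu-de-1.cloud.sap',
        port=443,
        server_username='svc-nova',   # placeholder account
        server_password='***',        # placeholder secret
        api_retry_count=10,
        task_poll_interval=0.5,
        create_session=True,          # performs SessionManager.Login, as logged above
    )
    # Subsequent SOAP calls (RetrieveServiceContent, PropertyCollector.*) reuse the session.
    service_content = session.vim.service_content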
[ 598.278950] env[63024]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.232s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 598.280588] env[63024]: INFO nova.virt.vmwareapi.driver [None req-9942e2ec-f004-4d36-a3b1-d9ff40c008ee None None] VMware vCenter version: 7.0.3
[ 598.284058] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbe9e9f-e477-402f-a69f-fa3bd2e43fbb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.307209] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0effe8f0-2f2f-4b00-83e9-0ee13efe228b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.313562] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd5ab8b-2795-43f5-aa4b-6303b885cb30 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.320518] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962e6be5-13d0-4cf2-a17f-8a33c9394e56 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.333933] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da24f5fd-9ba9-4abf-a15c-c911c2e0faa0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.340151] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94973ea3-c71b-49d6-a594-6042f6aa7a39 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.370704] env[63024]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-1b5fdfe7-236c-4f41-b001-12109c86c7cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 598.376402] env[63024]: DEBUG nova.virt.vmwareapi.driver [None req-9942e2ec-f004-4d36-a3b1-d9ff40c008ee None None] Extension org.openstack.compute already exists. {{(pid=63024) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 598.379157] env[63024]: INFO nova.compute.provider_config [None req-9942e2ec-f004-4d36-a3b1-d9ff40c008ee None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
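The repeated "Acquiring lock / acquired / released" DEBUG triplets in this log (around _create_session above, and later around the cell cache and "singleton_lock") are emitted by oslo.concurrency's lock wrappers. A minimal sketch of that pattern, with a placeholder function body; the lock names are reused from the log for illustration only.

    # Minimal sketch of oslo.concurrency locking; not code taken from Nova.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('oslo_vmware_api_lock')
    def create_session():
        # Only one thread/greenthread enters at a time; the wrapper logs how long
        # it waited for the lock and how long it held it, as seen above.
        pass

    # The same pattern is available as a context manager:
    with lockutils.lock('singleton_lock'):
        pass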
[ 598.881988] env[63024]: DEBUG nova.context [None req-9942e2ec-f004-4d36-a3b1-d9ff40c008ee None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),ebf2eb18-d093-4d8f-8384-591550af8828(cell1) {{(pid=63024) load_cells /opt/stack/nova/nova/context.py:464}} [ 598.884310] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.884723] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.885466] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.885910] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Acquiring lock "ebf2eb18-d093-4d8f-8384-591550af8828" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.886126] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Lock "ebf2eb18-d093-4d8f-8384-591550af8828" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.887190] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Lock "ebf2eb18-d093-4d8f-8384-591550af8828" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.907480] env[63024]: INFO dbcounter [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Registered counter for database nova_cell0 [ 598.915500] env[63024]: INFO dbcounter [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Registered counter for database nova_cell1 [ 598.918960] env[63024]: DEBUG oslo_db.sqlalchemy.engines [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63024) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}} [ 598.919316] env[63024]: DEBUG oslo_db.sqlalchemy.engines [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=63024) _check_effective_sql_mode 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:395}} [ 598.924147] env[63024]: ERROR nova.db.main.api [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.924147] env[63024]: result = function(*args, **kwargs) [ 598.924147] env[63024]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 598.924147] env[63024]: return func(*args, **kwargs) [ 598.924147] env[63024]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 598.924147] env[63024]: result = fn(*args, **kwargs) [ 598.924147] env[63024]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 598.924147] env[63024]: return f(*args, **kwargs) [ 598.924147] env[63024]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version [ 598.924147] env[63024]: return db.service_get_minimum_version(context, binaries) [ 598.924147] env[63024]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 598.924147] env[63024]: _check_db_access() [ 598.924147] env[63024]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 598.924147] env[63024]: stacktrace = ''.join(traceback.format_stack()) [ 598.924147] env[63024]: [ 598.925129] env[63024]: ERROR nova.db.main.api [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 598.925129] env[63024]: result = function(*args, **kwargs) [ 598.925129] env[63024]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper [ 598.925129] env[63024]: return func(*args, **kwargs) [ 598.925129] env[63024]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result [ 598.925129] env[63024]: result = fn(*args, **kwargs) [ 598.925129] env[63024]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper [ 598.925129] env[63024]: return f(*args, **kwargs) [ 598.925129] env[63024]: File "/opt/stack/nova/nova/objects/service.py", line 556, in _db_service_get_minimum_version [ 598.925129] env[63024]: return db.service_get_minimum_version(context, binaries) [ 598.925129] env[63024]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper [ 598.925129] env[63024]: _check_db_access() [ 598.925129] env[63024]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access [ 598.925129] env[63024]: stacktrace = ''.join(traceback.format_stack()) [ 598.925129] env[63024]: [ 598.925524] env[63024]: WARNING nova.objects.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Failed to get minimum service version for cell ebf2eb18-d093-4d8f-8384-591550af8828 [ 598.925647] env[63024]: WARNING nova.objects.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000 [ 598.926080] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Acquiring lock "singleton_lock" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 598.926242] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Acquired lock "singleton_lock" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
598.926474] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Releasing lock "singleton_lock" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 598.926793] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Full set of CONF: {{(pid=63024) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}} [ 598.926934] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ******************************************************************************** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 598.927072] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] Configuration options gathered from: {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 598.927208] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 598.927400] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 598.927526] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ================================================================================ {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 598.927734] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] allow_resize_to_same_host = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.927937] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] arq_binding_timeout = 300 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.928111] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] backdoor_port = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.928244] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] backdoor_socket = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.928412] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] block_device_allocate_retries = 60 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.928572] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] block_device_allocate_retries_interval = 3 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.928738] env[63024]: DEBUG 
oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cert = self.pem {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.928902] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.929082] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute_monitors = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.929257] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] config_dir = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.929423] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] config_drive_format = iso9660 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.929556] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.929718] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] config_source = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.929901] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] console_host = devstack {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.930143] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] control_exchange = nova {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.930317] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cpu_allocation_ratio = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.930479] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] daemon = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.930645] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] debug = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.930801] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] default_access_ip_network_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.930994] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] default_availability_zone = nova {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.931224] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] default_ephemeral_format = 
None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.931396] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] default_green_pool_size = 1000 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.931645] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.931807] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] default_schedule_zone = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.931963] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] disk_allocation_ratio = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.932210] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] enable_new_services = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.932390] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] enabled_apis = ['osapi_compute'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.932552] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] enabled_ssl_apis = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.932709] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] flat_injected = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.932865] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] force_config_drive = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.933032] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] force_raw_images = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.933202] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] graceful_shutdown_timeout = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.933359] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] heal_instance_info_cache_interval = 60 {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.933579] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] host = cpu-1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.933749] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.933911] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] initial_disk_allocation_ratio = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.934083] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] initial_ram_allocation_ratio = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.934299] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.934461] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] instance_build_timeout = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.934619] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] instance_delete_interval = 300 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.934784] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] instance_format = [instance: %(uuid)s] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.934947] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] instance_name_template = instance-%08x {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.935121] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] instance_usage_audit = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.935291] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] instance_usage_audit_period = month {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.935455] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.935618] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] instances_path = /opt/stack/data/nova/instances {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.935782] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] internal_service_availability_zone = internal {{(pid=63024) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.935937] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] key = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.936110] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] live_migration_retry_count = 30 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.936280] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] log_color = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.936442] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] log_config_append = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.936606] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.936763] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] log_dir = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.936918] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] log_file = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.937059] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] log_options = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.937219] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] log_rotate_interval = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.937383] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] log_rotate_interval_type = days {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.937549] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] log_rotation_type = none {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.937676] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.937798] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.937974] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.938140] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.938268] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.938420] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] long_rpc_timeout = 1800 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.938577] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] max_concurrent_builds = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.938733] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] max_concurrent_live_migrations = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.938887] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] max_concurrent_snapshots = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.939054] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] max_local_block_devices = 3 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.939214] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] max_logfile_count = 30 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.939369] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] max_logfile_size_mb = 200 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.939524] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] maximum_instance_delete_attempts = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.939688] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] metadata_listen = 0.0.0.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.939894] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] metadata_listen_port = 8775 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.940069] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] metadata_workers = 2 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.940237] env[63024]: DEBUG oslo_service.service 
[None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] migrate_max_retries = -1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.940402] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] mkisofs_cmd = genisoimage {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.940603] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] my_block_storage_ip = 10.180.1.21 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.940733] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] my_ip = 10.180.1.21 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.940959] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.941164] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] network_allocate_retries = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.941364] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.941540] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] osapi_compute_listen = 0.0.0.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.941702] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] osapi_compute_listen_port = 8774 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.941867] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] osapi_compute_unique_server_name_scope = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.942046] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] osapi_compute_workers = 2 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.942210] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] password_length = 12 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.942369] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] periodic_enable = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.942526] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] periodic_fuzzy_delay = 60 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.942688] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] pointer_model = usbtablet 
{{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.942852] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] preallocate_images = none {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.943015] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] publish_errors = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.943152] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] pybasedir = /opt/stack/nova {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.943305] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ram_allocation_ratio = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.943460] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] rate_limit_burst = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.943620] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] rate_limit_except_level = CRITICAL {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.943772] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] rate_limit_interval = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.943927] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] reboot_timeout = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.944096] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] reclaim_instance_interval = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.944251] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] record = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.944413] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] reimage_timeout_per_gb = 60 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.944576] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] report_interval = 120 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.944732] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] rescue_timeout = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.944886] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] reserved_host_cpus = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.945054] env[63024]: DEBUG oslo_service.service [None 
req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] reserved_host_disk_mb = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.945212] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] reserved_host_memory_mb = 512 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.945367] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] reserved_huge_pages = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.945523] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] resize_confirm_window = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.945676] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] resize_fs_using_block_device = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.945827] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] resume_guests_state_on_host_boot = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.945990] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.946164] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] rpc_response_timeout = 60 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.946321] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] run_external_periodic_tasks = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.946488] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] running_deleted_instance_action = reap {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.946644] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] running_deleted_instance_poll_interval = 1800 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.946796] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] running_deleted_instance_timeout = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.946947] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] scheduler_instance_sync_interval = 120 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.947125] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] service_down_time = 720 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.947291] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] 
servicegroup_driver = db {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.947439] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] shell_completion = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.947592] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] shelved_offload_time = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.947743] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] shelved_poll_interval = 3600 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.947907] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] shutdown_timeout = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.948074] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] source_is_ipv6 = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.948232] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ssl_only = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.948484] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.948939] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] sync_power_state_interval = 600 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.949143] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] sync_power_state_pool_size = 1000 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.949323] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] syslog_log_facility = LOG_USER {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.949485] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] tempdir = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.949648] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] timeout_nbd = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.949818] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] transport_url = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.950016] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] update_resources_interval = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.950190] env[63024]: DEBUG 
oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] use_cow_images = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.950349] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] use_eventlog = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.950505] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] use_journal = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.950659] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] use_json = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.950813] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] use_rootwrap_daemon = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.951016] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] use_stderr = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.951199] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] use_syslog = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.951354] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vcpu_pin_set = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.951520] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plugging_is_fatal = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.951684] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plugging_timeout = 300 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.951847] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] virt_mkfs = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.952013] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] volume_usage_poll_interval = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.952180] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] watch_log_file = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.952345] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] web = /usr/share/spice-html5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 598.952525] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.952689] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.952853] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.953038] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_concurrency.disable_process_locking = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.953335] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.953514] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.953678] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.953845] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.954018] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.954188] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.954368] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.auth_strategy = keystone {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.954531] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.compute_link_prefix = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.954707] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.954878] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.dhcp_domain = novalocal {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
598.955060] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.enable_instance_password = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.955227] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.glance_link_prefix = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.955393] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.955564] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.955726] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.instance_list_per_project_cells = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.955886] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.list_records_by_skipping_down_cells = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.956068] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.local_metadata_per_cell = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.956240] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.max_limit = 1000 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.956406] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.metadata_cache_expiration = 15 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.956575] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.neutron_default_tenant_id = default {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.956744] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.response_validation = warn {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.956906] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.use_neutron_default_nets = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.957084] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.957246] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.957409] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.957575] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.957739] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.vendordata_dynamic_targets = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.957896] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.vendordata_jsonfile_path = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.958093] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.958288] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.backend = dogpile.cache.memcached {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.958452] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.backend_argument = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.958610] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.backend_expiration_time = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.958775] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.config_prefix = cache.oslo {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.958939] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.dead_timeout = 60.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.959118] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.debug_cache_backend = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.959281] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.enable_retry_client = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.959440] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.enable_socket_keepalive = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.959609] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.enabled = True {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.959771] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.enforce_fips_mode = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.959967] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.expiration_time = 600 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.960158] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.hashclient_retry_attempts = 2 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.960329] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.hashclient_retry_delay = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.960495] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.memcache_dead_retry = 300 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.960652] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.memcache_password = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.960815] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.961034] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.961220] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.memcache_pool_maxsize = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.961387] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.961548] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.memcache_sasl_enabled = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.961725] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.961891] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.memcache_socket_timeout = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.962064] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.memcache_username = None {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.962229] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.proxies = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.962390] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.redis_db = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.962548] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.redis_password = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.962716] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.redis_sentinel_service_name = mymaster {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.962886] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.963067] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.redis_server = localhost:6379 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.963232] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.redis_socket_timeout = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.963391] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.redis_username = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.963552] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.retry_attempts = 2 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.963714] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.retry_delay = 0.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.963874] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.socket_keepalive_count = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.964468] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.socket_keepalive_idle = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.964468] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.socket_keepalive_interval = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.964468] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.tls_allowed_ciphers = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.964606] 
env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.tls_cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.964635] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.tls_certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.964792] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.tls_enabled = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.964947] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cache.tls_keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.965131] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.auth_section = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.965301] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.auth_type = password {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.965458] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.965628] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.catalog_info = volumev3::publicURL {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.965781] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.965939] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.collect_timing = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.966112] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.cross_az_attach = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.966273] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.debug = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.966430] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.endpoint_template = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.966591] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.http_retries = 3 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.966748] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.insecure = False {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.966902] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.967086] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.os_region_name = RegionOne {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.967252] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.967414] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cinder.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.967576] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.967730] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.cpu_dedicated_set = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.967884] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.cpu_shared_set = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.968055] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.image_type_exclude_list = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.968218] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.968378] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.max_concurrent_disk_ops = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.968532] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.max_disk_devices_to_attach = -1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.968690] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.968854] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.969028] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.resource_provider_association_refresh = 
300 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.969190] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.969350] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.shutdown_retry_interval = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.969524] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.969697] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] conductor.workers = 2 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.969887] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] console.allowed_origins = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.970067] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] console.ssl_ciphers = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.970243] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] console.ssl_minimum_version = default {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.970409] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] consoleauth.enforce_session_timeout = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.970572] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] consoleauth.token_ttl = 600 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.970739] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.970911] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.971126] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.collect_timing = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.971296] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.connect_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.971453] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.connect_retry_delay = None {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.971607] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.endpoint_override = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.971764] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.insecure = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.971917] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.972090] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.max_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.972252] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.min_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.972406] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.region_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.972561] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.retriable_status_codes = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.972713] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.service_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.972882] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.service_type = accelerator {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.973053] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.973215] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.status_code_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.973370] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.status_code_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.973522] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.973698] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.973856] env[63024]: 
DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] cyborg.version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.974036] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.asyncio_connection = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.974196] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.asyncio_slave_connection = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.974362] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.backend = sqlalchemy {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.974527] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.connection = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.974946] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.connection_debug = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.974946] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.connection_parameters = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.975096] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.connection_recycle_time = 3600 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.975164] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.connection_trace = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.975323] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.db_inc_retry_interval = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.975480] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.db_max_retries = 20 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.975637] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.db_max_retry_interval = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.975793] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.db_retry_interval = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.975950] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.max_overflow = 50 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.976120] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 
None None] database.max_pool_size = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.976280] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.max_retries = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.976443] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.976597] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.mysql_wsrep_sync_wait = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.976748] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.pool_timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.976906] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.retry_interval = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.977073] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.slave_connection = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.977231] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.sqlite_synchronous = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.977387] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] database.use_db_reconnect = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.977550] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.asyncio_connection = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.977705] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.asyncio_slave_connection = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.977869] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.backend = sqlalchemy {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.978066] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.connection = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.978209] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.connection_debug = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.978373] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.connection_parameters = 
{{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.978526] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.connection_recycle_time = 3600 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.978682] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.connection_trace = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.978837] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.db_inc_retry_interval = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.978997] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.db_max_retries = 20 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.979193] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.db_max_retry_interval = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.979378] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.db_retry_interval = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.979588] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.max_overflow = 50 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.979757] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.max_pool_size = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.979948] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.max_retries = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.980141] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.980303] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.980461] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.pool_timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.980622] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.retry_interval = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.980780] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.slave_connection = **** {{(pid=63024) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.980956] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] api_database.sqlite_synchronous = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.981153] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] devices.enabled_mdev_types = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.981329] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.981496] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ephemeral_storage_encryption.default_format = luks {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.981658] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ephemeral_storage_encryption.enabled = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.981817] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.981995] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.api_servers = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.982176] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.982333] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.982493] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.collect_timing = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.982651] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.connect_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.982802] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.connect_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.982960] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.debug = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.983139] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.default_trusted_certificate_ids = [] {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.983303] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.enable_certificate_validation = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.983463] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.enable_rbd_download = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.983619] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.endpoint_override = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.983781] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.insecure = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.983938] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.984110] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.max_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.984269] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.min_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.984427] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.num_retries = 3 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.984591] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.rbd_ceph_conf = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.984750] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.rbd_connect_timeout = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.984915] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.rbd_pool = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.985139] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.rbd_user = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.985302] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.region_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.985459] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.retriable_status_codes = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.985614] env[63024]: DEBUG oslo_service.service [None 
req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.service_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.985782] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.service_type = image {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.985941] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.986110] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.status_code_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.986266] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.status_code_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.986419] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.986596] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.986755] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.verify_glance_signatures = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.986910] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] glance.version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.987090] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] guestfs.debug = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.987261] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.auth_section = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.987423] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.auth_type = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.987579] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.987732] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.987893] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.collect_timing = False {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.988064] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.connect_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.988226] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.connect_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.988379] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.endpoint_override = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.988544] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.insecure = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.988686] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.988842] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.max_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.988998] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.min_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.989177] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.region_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.989346] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.retriable_status_codes = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.989501] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.service_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.989668] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.service_type = shared-file-system {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.989830] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.share_apply_policy_timeout = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.990031] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.990202] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.status_code_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.990361] env[63024]: 
DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.status_code_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.990519] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.990700] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.990871] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] manila.version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.991099] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] mks.enabled = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.991458] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.991646] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] image_cache.manager_interval = 2400 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.991813] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] image_cache.precache_concurrency = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.991988] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] image_cache.remove_unused_base_images = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.992173] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.992341] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.992514] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] image_cache.subdirectory_name = _base {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.992687] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.api_max_retries = 60 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.992848] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.api_retry_interval = 2 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.993009] 
env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.auth_section = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.993181] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.auth_type = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.993341] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.993498] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.993660] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.collect_timing = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.993821] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.conductor_group = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.993980] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.connect_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.994151] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.connect_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.994307] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.endpoint_override = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.994466] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.insecure = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.994620] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.994777] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.max_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.994931] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.min_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.995115] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.peer_list = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.995274] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.region_name = None {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.995431] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.retriable_status_codes = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.995592] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.serial_console_state_timeout = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.995746] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.service_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.995916] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.service_type = baremetal {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.996086] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.shard = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.996249] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.996405] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.status_code_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.996558] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.status_code_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.996713] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.996888] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.997061] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ironic.version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.997245] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.997419] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] key_manager.fixed_key = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.997597] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.997755] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.barbican_api_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.997911] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.barbican_endpoint = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.998094] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.barbican_endpoint_type = public {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.998278] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.barbican_region_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.998409] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.998565] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.998728] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.collect_timing = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.998886] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.insecure = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.999053] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.999219] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.number_of_retries = 60 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.999379] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.retry_delay = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.999539] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.send_service_user_token = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.999700] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 598.999857] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.000051] 
env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.verify_ssl = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.000217] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican.verify_ssl_path = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.000385] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican_service_user.auth_section = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.000548] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican_service_user.auth_type = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.000706] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican_service_user.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.000876] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican_service_user.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.001084] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican_service_user.collect_timing = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.001258] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican_service_user.insecure = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.001416] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican_service_user.keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.001576] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican_service_user.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.001734] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] barbican_service_user.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.001899] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vault.approle_role_id = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.002094] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vault.approle_secret_id = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.002238] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vault.kv_mountpoint = secret {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.002396] env[63024]: DEBUG 
oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vault.kv_path = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.002558] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vault.kv_version = 2 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.002715] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vault.namespace = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.002872] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vault.root_token_id = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.003042] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vault.ssl_ca_crt_file = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.003211] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vault.timeout = 60.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.003371] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vault.use_ssl = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.003545] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.003713] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.003872] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.004044] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.collect_timing = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.004208] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.connect_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.004364] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.connect_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.004519] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.endpoint_override = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.004678] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.insecure = False {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.004831] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.004987] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.max_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.005156] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.min_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.005314] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.region_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.005474] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.retriable_status_codes = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.005630] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.service_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.005798] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.service_type = identity {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.005958] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.006128] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.status_code_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.006287] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.status_code_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.006441] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.006619] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.006778] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] keystone.version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.006977] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.connection_uri = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.007151] 
env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.cpu_mode = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.007316] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.cpu_model_extra_flags = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.007481] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.cpu_models = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.007649] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.cpu_power_governor_high = performance {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.007816] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.cpu_power_governor_low = powersave {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.007977] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.cpu_power_management = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.008162] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.008333] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.device_detach_attempts = 8 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.008495] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.device_detach_timeout = 20 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.008657] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.disk_cachemodes = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.008814] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.disk_prefix = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.008975] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.enabled_perf_events = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.009154] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.file_backed_memory = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.009319] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.gid_maps = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.009473] env[63024]: DEBUG oslo_service.service [None 
req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.hw_disk_discard = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.009628] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.hw_machine_type = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.009794] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.images_rbd_ceph_conf = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.009983] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.010172] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.010344] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.images_rbd_glance_store_name = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.010514] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.images_rbd_pool = rbd {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.010683] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.images_type = default {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.010847] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.images_volume_group = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.011059] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.inject_key = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.011243] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.inject_partition = -2 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.011405] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.inject_password = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.011566] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.iscsi_iface = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.011726] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.iser_use_multipath = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.011886] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] 
libvirt.live_migration_bandwidth = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.012061] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.012225] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.live_migration_downtime = 500 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.012386] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.012544] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.012704] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.live_migration_inbound_addr = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.012862] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.013031] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.live_migration_permit_post_copy = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.013191] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.live_migration_scheme = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.013363] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.live_migration_timeout_action = abort {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.013522] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.live_migration_tunnelled = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.013679] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.live_migration_uri = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.013838] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.live_migration_with_native_tls = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.013994] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.max_queues = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.014170] env[63024]: DEBUG oslo_service.service [None 
req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.014396] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.014558] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.nfs_mount_options = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.014864] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.015064] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.015236] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.num_iser_scan_tries = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.015399] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.num_memory_encrypted_guests = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.015562] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.015724] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.num_pcie_ports = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.015890] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.num_volume_scan_tries = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.016070] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.pmem_namespaces = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.016233] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.quobyte_client_cfg = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.016534] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.016716] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.rbd_connect_timeout = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.016881] env[63024]: DEBUG 
oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.017060] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.017219] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.rbd_secret_uuid = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.017375] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.rbd_user = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.017533] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.017700] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.remote_filesystem_transport = ssh {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.017858] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.rescue_image_id = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.018022] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.rescue_kernel_id = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.018180] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.rescue_ramdisk_id = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.018347] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.018529] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.rx_queue_size = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.018665] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.smbfs_mount_options = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.018937] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.019123] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.snapshot_compression = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.019282] env[63024]: DEBUG 
oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.snapshot_image_format = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.019511] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.019677] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.sparse_logical_volumes = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.019837] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.swtpm_enabled = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.020055] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.swtpm_group = tss {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.020235] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.swtpm_user = tss {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.020406] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.sysinfo_serial = unique {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.020563] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.tb_cache_size = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.020719] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.tx_queue_size = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.020895] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.uid_maps = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.021109] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.use_virtio_for_bridges = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.021293] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.virt_type = kvm {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.021463] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.volume_clear = zero {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.021624] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.volume_clear_size = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.021785] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 
None None] libvirt.volume_use_multipath = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.021941] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.vzstorage_cache_path = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.022130] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.022299] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.vzstorage_mount_group = qemu {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.022459] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.vzstorage_mount_opts = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.022624] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.022909] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.023102] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.vzstorage_mount_user = stack {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.023271] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.023443] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.auth_section = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.023618] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.auth_type = password {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.023778] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.023938] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.024117] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.collect_timing = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.024281] env[63024]: DEBUG oslo_service.service [None 
req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.connect_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.024439] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.connect_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.024611] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.default_floating_pool = public {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.024769] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.endpoint_override = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.024933] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.extension_sync_interval = 600 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.025109] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.http_retries = 3 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.025272] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.insecure = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.025430] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.025591] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.max_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.025761] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.025919] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.min_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.026100] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.ovs_bridge = br-int {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.026272] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.physnets = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.026442] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.region_name = RegionOne {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.026600] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.retriable_status_codes = None 
{{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.026768] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.service_metadata_proxy = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.026927] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.service_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.027110] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.service_type = network {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.027274] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.027431] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.status_code_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.027586] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.status_code_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.027743] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.027922] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.028098] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] neutron.version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.028272] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] notifications.bdms_in_notifications = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.028448] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] notifications.default_level = INFO {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.028640] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] notifications.notification_format = unversioned {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.028782] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] notifications.notify_on_state_change = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.028957] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] notifications.versioned_notifications_topics = 
['versioned_notifications'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.029149] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] pci.alias = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.029318] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] pci.device_spec = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.029480] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] pci.report_in_placement = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.029648] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.auth_section = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.029818] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.auth_type = password {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.030020] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.030190] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.030350] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.030509] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.collect_timing = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.030668] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.connect_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.030824] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.connect_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.031034] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.default_domain_id = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.031212] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.default_domain_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.031371] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.domain_id = None {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.031528] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.domain_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.031682] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.endpoint_override = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.031841] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.insecure = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.031994] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.032168] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.max_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.032322] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.min_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.032487] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.password = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.032644] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.project_domain_id = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.032808] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.project_domain_name = Default {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.032974] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.project_id = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.033162] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.project_name = service {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.033331] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.region_name = RegionOne {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.033489] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.retriable_status_codes = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.033644] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.service_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.033812] 
env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.service_type = placement {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.033975] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.034148] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.status_code_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.034306] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.status_code_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.034461] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.system_scope = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.034623] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.034781] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.trust_id = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.034935] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.user_domain_id = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.035117] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.user_domain_name = Default {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.035276] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.user_id = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.035449] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.username = nova {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.035628] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.035788] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] placement.version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.035964] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.cores = 20 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.036142] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None 
None] quota.count_usage_from_placement = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.036312] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.036479] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.injected_file_content_bytes = 10240 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.036644] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.injected_file_path_length = 255 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.036807] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.injected_files = 5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.036972] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.instances = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.037155] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.key_pairs = 100 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.037324] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.metadata_items = 128 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.037489] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.ram = 51200 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.037652] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.recheck_quota = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.037819] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.server_group_members = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.037984] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.server_groups = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.038207] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.038382] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] quota.unified_limits_resource_strategy = require {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.038555] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=63024) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.038748] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.038873] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] scheduler.image_metadata_prefilter = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.039044] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.039208] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] scheduler.max_attempts = 3 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.039367] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] scheduler.max_placement_results = 1000 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.039528] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.039689] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] scheduler.query_placement_for_image_type_support = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.039851] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.040059] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] scheduler.workers = 2 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.040241] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.040413] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.040591] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.040757] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.040941] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.041153] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.041331] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.041527] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.041699] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.host_subset_size = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.041860] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.042029] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.042202] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.042366] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.isolated_hosts = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.042529] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.isolated_images = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.042691] env[63024]: DEBUG oslo_service.service [None 
req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.042854] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.043050] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.043218] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.pci_in_placement = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.043377] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.043535] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.043692] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.043850] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.044015] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.044186] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.044345] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.track_instance_changes = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.044518] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.044684] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] metrics.required = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.044843] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] metrics.weight_multiplier = 1.0 
{{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.045007] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.045226] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] metrics.weight_setting = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.045560] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.045737] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] serial_console.enabled = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.045917] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] serial_console.port_range = 10000:20000 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.046103] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.046275] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.046441] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] serial_console.serialproxy_port = 6083 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.046607] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] service_user.auth_section = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.046778] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] service_user.auth_type = password {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.046936] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] service_user.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.047106] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] service_user.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.047269] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] service_user.collect_timing = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.047429] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] service_user.insecure = False {{(pid=63024) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.047582] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] service_user.keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.047754] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] service_user.send_service_user_token = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.047914] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] service_user.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.048086] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] service_user.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.048257] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.agent_enabled = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.048417] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.enabled = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.048729] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.048940] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.049132] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.html5proxy_port = 6082 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.049286] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.image_compression = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.049444] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.jpeg_compression = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.049600] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.playback_compression = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.049757] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.require_secure = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.049952] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.server_listen = 127.0.0.1 {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.050155] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.050318] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.streaming_mode = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.050472] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] spice.zlib_compression = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.050634] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] upgrade_levels.baseapi = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.050805] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] upgrade_levels.compute = auto {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.050996] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] upgrade_levels.conductor = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.051185] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] upgrade_levels.scheduler = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.051354] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vendordata_dynamic_auth.auth_section = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.051518] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vendordata_dynamic_auth.auth_type = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.051676] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vendordata_dynamic_auth.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.051831] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vendordata_dynamic_auth.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.051992] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.052169] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vendordata_dynamic_auth.insecure = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.052325] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vendordata_dynamic_auth.keyfile = None {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.052485] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.052640] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vendordata_dynamic_auth.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.052811] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.api_retry_count = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.052971] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.ca_file = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.053161] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.cache_prefix = devstack-image-cache {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.053334] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.cluster_name = testcl1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.053499] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.connection_pool_size = 10 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.053658] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.console_delay_seconds = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.053827] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.datastore_regex = ^datastore.* {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.054046] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.054229] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.host_password = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.054397] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.host_port = 443 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.054565] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.host_username = administrator@vsphere.local {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.054731] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.insecure = True {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.054890] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.integration_bridge = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.055066] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.maximum_objects = 100 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.055225] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.pbm_default_policy = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.055384] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.pbm_enabled = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.055538] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.pbm_wsdl_location = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.055704] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.055862] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.serial_port_proxy_uri = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.056027] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.serial_port_service_uri = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.056193] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.task_poll_interval = 0.5 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.056363] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.use_linked_clone = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.056530] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.vnc_keymap = en-us {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.056694] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.vnc_port = 5900 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.056855] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vmware.vnc_port_total = 10000 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.057050] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vnc.auth_schemes = ['none'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.057229] 
env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vnc.enabled = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.057517] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.057701] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.057872] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vnc.novncproxy_port = 6080 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.058082] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vnc.server_listen = 127.0.0.1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.058269] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.058430] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vnc.vencrypt_ca_certs = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.058586] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vnc.vencrypt_client_cert = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.058743] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vnc.vencrypt_client_key = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.058962] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.059093] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.disable_deep_image_inspection = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.059259] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.059418] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.059576] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.059734] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.disable_rootwrap = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.059927] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.enable_numa_live_migration = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.060096] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.060261] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.060420] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.060578] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.libvirt_disable_apic = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.060732] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.060908] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.061118] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.061287] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.061446] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.061603] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.061758] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.061917] 
env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.062089] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.062259] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.062436] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.062605] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] wsgi.client_socket_timeout = 900 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.062770] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] wsgi.default_pool_size = 1000 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.062935] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] wsgi.keep_alive = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.063115] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] wsgi.max_header_line = 16384 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.063279] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] wsgi.secure_proxy_ssl_header = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.063441] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] wsgi.ssl_ca_file = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.063599] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] wsgi.ssl_cert_file = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.063756] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] wsgi.ssl_key_file = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.063919] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] wsgi.tcp_keepidle = 600 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.064104] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.064272] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] zvm.ca_file = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.064432] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] zvm.cloud_connector_url = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.064720] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.064895] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] zvm.reachable_timeout = 300 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.065081] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.065264] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.065439] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler.connection_string = messaging:// {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.065611] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler.enabled = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.065781] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler.es_doc_type = notification {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.065944] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler.es_scroll_size = 10000 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.066124] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler.es_scroll_time = 2m {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.066288] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler.filter_error_trace = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.066456] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler.hmac_keys = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.066621] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler.sentinel_service_name = mymaster {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.066783] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler.socket_timeout = 0.1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.066941] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler.trace_requests = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.067115] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler.trace_sqlalchemy = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.067302] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler_jaeger.process_tags = {} {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.067462] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler_jaeger.service_name_prefix = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.067622] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] profiler_otlp.service_name_prefix = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.067784] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] remote_debug.host = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.067942] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] remote_debug.port = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.068135] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.068297] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.068457] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.068615] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.068771] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.068928] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False 
{{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.069099] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.069260] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.069429] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.069597] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.069754] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.069952] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.070147] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.070317] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.070487] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.070652] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.070811] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.071035] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.071215] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.071381] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.071549] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.071713] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.071876] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.072059] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.072233] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.072387] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.072545] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.072703] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.072867] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.073064] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.ssl = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.073272] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.073449] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.073613] env[63024]: DEBUG 
oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.073784] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.073952] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.ssl_version = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.074128] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.074327] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.074495] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_notifications.retry = -1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.074676] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.074848] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_messaging_notifications.transport_url = **** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.075034] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.auth_section = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.075202] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.auth_type = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.075360] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.cafile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.075513] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.certfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.075673] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.collect_timing = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.075828] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.connect_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
599.075980] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.connect_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.076151] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.endpoint_id = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.076319] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.endpoint_interface = publicURL {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.076473] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.endpoint_override = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.076626] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.endpoint_region_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.076778] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.endpoint_service_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.076930] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.endpoint_service_type = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.077101] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.insecure = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.077257] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.keyfile = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.077410] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.max_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.077562] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.min_version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.077713] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.region_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.077866] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.retriable_status_codes = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.078029] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.service_name = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.078186] env[63024]: DEBUG 
oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.service_type = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.078345] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.split_loggers = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.078501] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.status_code_retries = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.078658] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.status_code_retry_delay = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.078814] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.timeout = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.078970] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.valid_interfaces = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.079167] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_limit.version = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.079301] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_reports.file_event_handler = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.079463] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.079618] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] oslo_reports.log_dir = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.079786] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.079967] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.080152] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.080319] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=63024) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.080488] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.080642] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plug_linux_bridge_privileged.user = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.080808] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.080998] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plug_ovs_privileged.group = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.081185] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.081348] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.081507] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.081662] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] vif_plug_ovs_privileged.user = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.081828] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_linux_bridge.flat_interface = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.082008] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.082218] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.082359] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.082525] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.082686] env[63024]: DEBUG oslo_service.service [None 
req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.082848] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.083017] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.083208] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.083377] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_ovs.isolate_vif = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.083541] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.083703] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.083868] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.084046] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_ovs.ovsdb_interface = native {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.084210] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] os_vif_ovs.per_port_bridge = False {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.084381] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] privsep_osbrick.capabilities = [21] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.084538] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] privsep_osbrick.group = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.084694] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] privsep_osbrick.helper_command = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.084855] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.085025] env[63024]: DEBUG 
oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.085187] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] privsep_osbrick.user = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.085360] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.085513] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] nova_sys_admin.group = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.085665] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] nova_sys_admin.helper_command = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.085823] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.085983] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.086148] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] nova_sys_admin.user = None {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 599.086278] env[63024]: DEBUG oslo_service.service [None req-1c87857a-5f51-4325-841a-e16f77ac0ae1 None None] ******************************************************************************** {{(pid=63024) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 599.086689] env[63024]: INFO nova.service [-] Starting compute node (version 0.1.0) [ 599.590129] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Getting list of instances from cluster (obj){ [ 599.590129] env[63024]: value = "domain-c8" [ 599.590129] env[63024]: _type = "ClusterComputeResource" [ 599.590129] env[63024]: } {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 599.591246] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea3c057-3f38-4b3c-b5e5-b0a39b576e4c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.600068] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Got total of 0 instances {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 599.600638] env[63024]: WARNING nova.virt.vmwareapi.driver [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. 
It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 599.601116] env[63024]: INFO nova.virt.node [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Generated node identity 89dfa68a-133e-436f-a9f1-86051f9fb96b [ 599.601350] env[63024]: INFO nova.virt.node [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Wrote node identity 89dfa68a-133e-436f-a9f1-86051f9fb96b to /opt/stack/data/n-cpu-1/compute_id [ 600.104306] env[63024]: WARNING nova.compute.manager [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Compute nodes ['89dfa68a-133e-436f-a9f1-86051f9fb96b'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 601.111694] env[63024]: INFO nova.compute.manager [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 602.118980] env[63024]: WARNING nova.compute.manager [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 602.119339] env[63024]: DEBUG oslo_concurrency.lockutils [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.119477] env[63024]: DEBUG oslo_concurrency.lockutils [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.119628] env[63024]: DEBUG oslo_concurrency.lockutils [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.119800] env[63024]: DEBUG nova.compute.resource_tracker [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 602.120792] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9b2cf0-3cea-4dc5-a8d0-45b8f56337cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.129098] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-297c7e49-95ae-488b-9f4d-7c2ecbdaa850 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.143137] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72abfc2a-bb86-4460-88e4-960c62689b4c {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.149916] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6c4eda5-2998-4ad4-b403-9540bc2b3765 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.179543] env[63024]: DEBUG nova.compute.resource_tracker [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181330MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 602.179673] env[63024]: DEBUG oslo_concurrency.lockutils [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 602.179866] env[63024]: DEBUG oslo_concurrency.lockutils [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.683136] env[63024]: WARNING nova.compute.resource_tracker [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] No compute node record for cpu-1:89dfa68a-133e-436f-a9f1-86051f9fb96b: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 89dfa68a-133e-436f-a9f1-86051f9fb96b could not be found. [ 603.190527] env[63024]: INFO nova.compute.resource_tracker [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 89dfa68a-133e-436f-a9f1-86051f9fb96b [ 604.698350] env[63024]: DEBUG nova.compute.resource_tracker [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 604.698724] env[63024]: DEBUG nova.compute.resource_tracker [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 604.855168] env[63024]: INFO nova.scheduler.client.report [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] [req-3fcd5521-f913-4ba3-80c6-737632712c40] Created resource provider record via placement API for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 604.871972] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d3390a-a0f4-4d6c-9310-7ca3f93a8394 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.879431] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ee3ef8-6d0e-4968-ab3d-4cacb49ccf53 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.908962] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccffd96-10e2-4f6f-a0eb-66c0742ad8ad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.916040] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5e6b9f-12fb-4f35-8eb5-6da170cb9925 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.929018] env[63024]: DEBUG nova.compute.provider_tree [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 605.465295] env[63024]: DEBUG nova.scheduler.client.report [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 605.465531] env[63024]: DEBUG nova.compute.provider_tree [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 0 to 1 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 605.465671] env[63024]: DEBUG nova.compute.provider_tree [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 605.516859] env[63024]: DEBUG nova.compute.provider_tree [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Updating 
resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 1 to 2 during operation: update_traits {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 606.021370] env[63024]: DEBUG nova.compute.resource_tracker [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 606.021790] env[63024]: DEBUG oslo_concurrency.lockutils [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.842s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.021790] env[63024]: DEBUG nova.service [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Creating RPC server for service compute {{(pid=63024) start /opt/stack/nova/nova/service.py:186}} [ 606.036047] env[63024]: DEBUG nova.service [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] Join ServiceGroup membership for this service compute {{(pid=63024) start /opt/stack/nova/nova/service.py:203}} [ 606.036236] env[63024]: DEBUG nova.servicegroup.drivers.db [None req-53d12fbb-3f98-42a6-867b-1b9f5750729d None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=63024) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 624.044199] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_power_states {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.549021] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Getting list of instances from cluster (obj){ [ 624.549021] env[63024]: value = "domain-c8" [ 624.549021] env[63024]: _type = "ClusterComputeResource" [ 624.549021] env[63024]: } {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 624.549021] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e6cc10b-d1ca-40b7-983f-2ef534171022 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.557063] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Got total of 0 instances {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 624.557501] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 624.557945] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Getting list of instances from cluster (obj){ [ 624.557945] env[63024]: value = "domain-c8" [ 624.557945] env[63024]: _type = "ClusterComputeResource" [ 624.557945] env[63024]: } {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 624.559186] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3cbb0ed-6804-42ec-a0f2-5fb3aa217bac 
{{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.567336] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Got total of 0 instances {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 654.426881] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.427275] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.427398] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 654.427459] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 654.930935] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 654.931176] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.931420] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.931620] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.931815] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.931994] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.932188] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.932349] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 654.932493] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 655.435612] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.435952] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.436071] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.436169] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 655.437022] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52381f5-ccaf-470d-9087-fa7739a9ce8b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.444994] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-037e8f2b-ff28-4708-8dfd-043d12e63f39 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.458230] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff9e97db-74d8-4288-a77a-6cfd85e0584f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.464453] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4265066-ded5-4843-91e3-8547b522fdd9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.492770] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181330MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 655.493046] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 655.493635] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 656.513560] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 656.513831] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 656.528790] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a03024-cd23-434d-a0e7-411290eab0bc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.535934] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cba6c872-ded0-4d6c-b44f-84f71f24f6c7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.564377] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61024cf-a031-4689-98e6-cbc6b4ff1ef1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.571257] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-329622ea-c386-4ee2-9050-f91fa0618367 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.583898] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 657.087573] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 657.591849] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 657.592236] env[63024]: DEBUG oslo_concurrency.lockutils [None 
req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.099s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.577893] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 717.578287] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 718.084839] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 718.085021] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 718.085144] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 718.587407] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. 
{{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 718.587730] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 718.587848] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 718.587917] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 718.588083] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 718.588224] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 718.588361] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 718.588486] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 718.588619] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 719.094023] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.094023] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.094023] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.094023] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 719.094023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f9ecb1c-8343-4199-bc5c-92d6f89a80af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.101858] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7964a5a-c81d-4ce6-ac53-be0362ec51c3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.115595] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea9af410-294a-4014-a6a4-5a020f882261 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.121843] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38910d48-3132-4556-be9e-ea56276158cc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.149660] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181333MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 719.150039] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.150375] 
env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 720.168715] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 720.168960] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 720.181396] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c02d67e-ab28-4738-be12-9e90bf7e0bcd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.189247] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27c110e4-a808-4a19-bcb0-a6280cd682b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.218687] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6099f145-fb6f-467c-9d2d-87fcbdd92b7b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.225436] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff205e4-18b5-4e28-b9a5-9c88f5c9a00a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.237857] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.741058] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 720.742538] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 720.742716] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.592s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.743656] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 780.744050] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 780.744162] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 780.744237] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 781.247584] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 781.247834] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.247967] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.248121] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.248266] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.248402] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.248549] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.248682] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 781.248805] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 781.751654] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.752199] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 781.752199] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 781.752320] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 781.753356] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab68315-fde8-4835-af83-7f5e04db6f58 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.761718] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7e7a8e3-25b4-41d7-b68a-a2e6406f11cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.775372] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696f3825-31aa-4982-9d3f-b59370d70af6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.781328] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a65b5068-6ad0-4b22-b935-6085de6e39c5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.809188] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181319MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 781.809334] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 781.809507] 
env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.830581] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 782.830855] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 782.845789] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fda5267-60b9-4b8a-835f-eb546d76f920 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.853806] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364201b7-99df-41de-bc5a-40a0ccc0b2fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.885011] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3643ec99-999e-40fc-bfc8-4f20c425d332 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.892890] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cde3ef-dd25-48ea-a553-2f551289a42e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.906697] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.410992] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 783.412363] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 783.412561] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.603s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 837.080967] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.587061] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 837.587061] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 837.587061] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 838.088114] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 838.088432] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 838.088537] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 838.088664] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 838.088807] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 838.088936] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 838.089090] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 838.089240] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 838.089428] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 838.592512] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.592788] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.592940] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.593126] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 838.594009] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0de05a-240c-4bb2-8ae3-eecb86b8a22f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.602375] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d07025-24ed-4ff9-90c2-4b38b21efdd6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.616721] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb77aa9-1375-4f29-aaad-5f504ed61ed6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.623160] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5bd2606-cd34-4a3d-bfdd-ccd331ca6923 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.650723] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181326MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 838.650861] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.651039] 
env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 839.668140] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 839.668398] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 839.683148] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e8e848f-a09d-4f7a-b746-32cda6955071 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.690523] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17d98727-a7aa-470c-a6b2-dd9fbda4583a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.720568] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13df35c-caed-4236-9a8a-d35168d1806b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.727338] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a5b6b0-eb47-43f8-963e-3b0671bd8c83 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.739723] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.242597] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 840.243854] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 840.244050] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.575628] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 894.417899] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 894.417899] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 894.417899] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Cleaning up deleted instances {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11558}} [ 894.924061] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] There are 0 instances to clean {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11567}} [ 894.924266] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 894.924266] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Cleaning up deleted instances with incomplete migration {{(pid=63024) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11596}} [ 895.426951] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 897.930317] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 897.930690] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 897.930690] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 898.433238] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. 
{{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 898.433629] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.433845] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.434022] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.434167] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.434313] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.434439] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 898.434589] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 898.938109] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.938485] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 898.938525] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.938681] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 898.939601] env[63024]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5234cda5-9d43-4124-a7a5-2baf923853b1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.947601] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dad026d-f915-442b-a100-e79ef0c6a7a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.961867] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af488213-be90-4aac-a3db-8edb0ac78658 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.967753] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-320431d9-7288-4606-b9b9-d8daf27d86fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.995407] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181319MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 898.995514] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 898.995677] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 900.015967] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 900.016266] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 900.030287] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a08d52-db96-4afc-a05a-96c7202464e2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.037908] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3327a90-27ab-49f1-9da5-b1465daa8874 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.067167] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcce137f-0189-48b3-a2a1-0fd37cb0604b {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.073724] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d532336d-1fd4-40c1-906e-f971a658c235 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.086023] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.588638] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 900.589897] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 900.590099] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.594s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.072979] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.416485] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.416881] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.416881] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 957.413585] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.416173] env[63024]: DEBUG oslo_service.periodic_task [None 
req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.416544] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 958.416544] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 958.919747] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 958.920010] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 958.920161] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 959.416862] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 959.919839] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.920095] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 959.920264] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.920456] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 959.921395] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-995d7391-d7e0-42ce-b750-c97929c78465 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.929827] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-39f29b16-48e2-4cfa-a3b7-70c4a0c7b24b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.943925] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4e516f-24b9-494c-ad95-f47b96f63f7a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.950336] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a15a0a9-b558-4bec-8bf2-70540da7d980 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.979585] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181326MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 959.979751] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 959.979922] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.012786] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 961.013081] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 961.029651] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 961.041998] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 961.042183] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Updating inventory in 
ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 961.053834] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 961.070562] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 961.082472] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9d6f4f-0321-4350-b69b-e9767341aef6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.089900] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12de02cc-5bf6-43a0-84ec-11efbcd2af28 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.119511] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43342a6-19dd-4273-ab1b-89f2b50cbbb2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.126239] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd3484b-92a1-4c4e-b2ba-1fa851673d54 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.138711] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.642233] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 961.643491] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 961.643696] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.664s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.639431] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.639878] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 962.639878] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1016.416495] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1017.417248] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1018.417276] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1018.417709] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1018.417935] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1019.417016] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1019.920685] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.921145] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.921207] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.921333] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1019.922211] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba6e5c2-2645-4a36-8f5f-39072d219b5d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.930192] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a364b87-c2bb-4f54-9a3a-363ce5f44d31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.943732] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32820d4-baf3-4ec4-8690-ebc93ac363b3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.949604] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990a9f6c-797d-486d-941d-a5e291c6e0f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.978445] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181326MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1019.978587] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1019.978759] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1020.995593] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1020.995812] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1021.008349] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8dd517-ed7b-494d-ac7f-f37c6d3d1f97 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.016073] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90bc40f4-0b7c-4ada-8735-96232f6f9e37 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.045161] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfca647a-1e84-4ff7-8801-180f91dc11bd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.052006] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f277efa5-0b1c-4242-ae56-4c48a6156c16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.064880] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.568521] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1021.569785] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1021.569968] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.570546] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1022.570963] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1022.570963] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1022.571078] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 1023.075853] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 1023.076148] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1023.076294] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.418346] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.413110] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.918736] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1078.918736] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.417793] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1079.417793] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1080.417943] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1080.418403] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1080.418403] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 1080.921075] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 1080.921331] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1081.424780] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.425186] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1081.425228] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1081.425389] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1081.426268] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0721141-301d-47f3-9a3c-dbc685be9537 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.434108] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab111bf6-c509-4752-9fd1-b3215d75bf95 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.447805] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9740a6b7-0a8a-49d9-ac6e-a5fe5e18596b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.453894] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b87368f-afab-4dbb-b2df-a44d5bb31c79 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.483075] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181320MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1081.483221] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1081.483489] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1082.506033] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1082.506033] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1082.522023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ef9a9e-d72c-4601-8685-61dc8013cb33 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.527326] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994aab68-73a4-46b1-aa6c-aa0181aac57a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.555509] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fa6bede-5b68-4eb9-88d5-5e6b3cfb19d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.562158] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8016b3-dce3-452a-9fc8-4f1c00aea5d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.574577] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None 
None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1083.079091] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1083.080044] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1083.080275] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.597s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1083.576266] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1083.576648] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1084.412503] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1139.416786] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.418576] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.418576] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1140.418576] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 1140.921061] env[63024]: DEBUG nova.compute.manager [None 
req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 1140.921061] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1140.921061] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.417494] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1141.417494] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1142.417802] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.417802] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1142.922277] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.922277] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1142.922277] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1142.922277] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1142.922277] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f1a8f5-dfc3-4a08-b321-93948dd63177 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.931219] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb1acc0-c048-4718-be83-b2213c5de3f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.948068] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46a974fd-cc33-4332-a24d-79d3f3eb3c2f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.953765] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb04d97-e27d-4066-8f52-545afd6a02af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.985740] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181316MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1142.986333] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1142.986715] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1144.008071] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1144.008071] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1144.022702] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a37ba5-0d41-4350-9abc-578973242b9b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.030927] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d389e24a-5af4-428d-ba59-7bfe497009f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.061090] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1e6dfb-5510-42fd-a944-252a3a4c47c5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.068748] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-198e2666-ae18-4737-a9bb-96cc3eeadd5d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.082032] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1144.585763] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1144.587152] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1144.587374] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.601s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1145.587584] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1145.587879] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.417572] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1195.418076] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Cleaning up deleted instances with incomplete migration {{(pid=63024) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11596}} [ 1198.916064] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1199.420538] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 
1199.420699] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.920022] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1201.920468] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1201.920468] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 1202.423644] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 1202.423869] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.424057] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.424242] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1202.424371] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1203.416968] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1203.920458] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.920734] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1203.920858] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.921015] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1203.921900] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0167a8a1-a289-4486-bec1-8a12011c5c43 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.929925] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e0e752e-2e38-4541-8893-b7866b6242d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.944661] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1a1ace-aa66-4796-afd5-bf9bfa11f2fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.951508] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23401591-01b0-4906-91f3-4603ed2b7829 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.982018] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181311MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1203.982224] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1203.982393] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.000661] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1205.000906] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1205.014431] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4334a32b-8c13-402b-b140-4911c53c284d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.022052] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26f4f83-0b21-4e3c-8218-219fd20454fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.050708] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be0490a0-d57d-484b-a7b9-8f65098e7e77 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.057754] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f44161-d165-4bc1-b7b4-0d7845e6578e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.070229] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1205.573666] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1205.574924] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1205.575123] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.593s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1206.571026] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.571429] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1206.571429] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.416691] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1207.416897] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Cleaning up deleted instances {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11558}} [ 1207.920260] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] There are 0 instances to clean {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11567}} [ 1224.548356] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_power_states {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1225.051374] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Getting list of instances from cluster (obj){ [ 1225.051374] env[63024]: value = "domain-c8" [ 1225.051374] env[63024]: _type = "ClusterComputeResource" [ 1225.051374] env[63024]: } {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1225.052456] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-779c906e-9991-4e76-afc8-72cec0ae2387 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.061304] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Got total of 0 instances {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1259.930890] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.416471] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1261.416913] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1261.416913] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 1261.920236] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 1261.920502] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1262.417373] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.417829] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.418215] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1263.418215] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1263.922065] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1263.922326] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1263.922487] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1263.922639] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1263.923615] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-065e306e-6677-429e-8dc3-db2525b67f81 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.932150] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2daff71-4a2e-4260-8589-5fda93e90da2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.946100] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73839702-8f95-4f80-8e2e-c37aa1abe12c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.952473] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc873aa2-c141-4511-a963-964700cc9ac6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.981853] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181324MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1263.982050] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1263.982152] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1265.097412] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1265.097662] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1265.113169] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1265.124273] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1265.124448] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1265.133691] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1265.148514] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1265.159914] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-40bba1ab-4e6f-4976-a899-d8bfd5b18b75 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.167472] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4c3ec9-1402-4e10-96a5-5d9f4fa46d09 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.196726] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e20ef4f-89f6-424a-bbb1-ce9b93f47d7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.203630] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f686fd3-012c-4a67-b9b9-a58746a3e5e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1265.215549] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1265.719163] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1265.720419] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1265.720599] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.738s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1266.715066] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.715398] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1266.715483] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1319.417994] env[63024]: DEBUG 
oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1320.413206] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1322.418027] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.417310] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.417578] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1323.417618] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 1323.920491] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. 
{{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 1323.920872] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1323.920872] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1324.423691] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.423936] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1324.424112] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1324.424275] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1324.425209] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95248b56-888e-4618-a595-3fb6fc318a0b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.433330] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95006f5d-5bcf-47fd-a832-50c94867ef55 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.447293] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9342dba3-4995-4969-a412-030b0248d8e0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.453729] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4176d258-eb55-4c47-a3c4-f1894dec1646 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.482172] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181314MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1324.482390] env[63024]: DEBUG 
oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1324.482720] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1325.502986] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1325.503260] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1325.516400] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd70fbd-13f0-452e-89d0-f09b9f7cfbc7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.523910] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4e07b5-fc46-4cb2-b1fe-bc38c626229d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.553548] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4906e7e-73b5-41b4-a9ec-4f8b7439c037 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.560712] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401dfba1-c945-4438-b1d7-6f369ef1e56f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.573530] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1326.076385] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1326.077716] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1326.077898] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1327.575182] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.575559] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.575604] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.575739] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1327.575878] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1380.416483] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1383.418976] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1384.417479] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1384.417713] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1384.417798] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 1384.920587] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. 
{{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 1384.920976] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.416549] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1385.920649] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.920953] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1385.921040] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1385.921282] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1385.922549] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af2854dd-186e-4f97-adc2-b9635ac4a7f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.931886] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d225034c-00f0-4e03-8308-883c34551a6e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.946160] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93a5202a-020c-42ce-811c-d66d74f572b1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.952409] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5147aa-c595-4149-a780-bfebd0b3606d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1385.981067] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181326MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1385.981230] env[63024]: DEBUG 
oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1385.981415] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1387.000914] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1387.001232] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1387.015454] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954dda6f-d4ec-4c2d-8ed3-806224d5f366 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.022800] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b30a8d-9180-4700-8119-49b2f04f01a3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.052616] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96cfa86a-810a-4457-90ea-10321c7e7e09 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.059181] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121d6897-81e9-4c11-803d-09f0afadce61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1387.071403] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1387.574611] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1387.575935] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for 
cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1387.576132] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.595s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1388.571852] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.573215] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.573215] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.573215] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1388.573215] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1441.412809] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1442.417415] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.416586] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1444.416957] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.417278] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1445.417655] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1445.417655] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 1445.920512] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. 
{{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 1445.920754] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1446.423662] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1446.424042] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1446.424094] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1446.424208] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1446.425130] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5bb565-166c-431b-8f8a-c505c0868e05 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.433052] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ff1202a-ae11-4bab-a35b-cfa1813f2e66 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.446214] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeb6c704-0155-40a5-8d91-b484ad9b706d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.452414] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3945079-aa16-41cf-adda-403431e1cea0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.480379] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181319MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1446.480543] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
1446.480720] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1447.497877] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1447.498148] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1447.510720] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fa4cb0-701d-4667-9321-ad33ef228ee0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.518037] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6d8ca1d-7ff1-4ec3-99de-b5f3d4570a1b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.547417] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e0319b7-f0f4-47db-a891-1d4a0e38ab7f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.553967] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56711850-026b-429f-bd58-a1bbaf05ecfe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.566393] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1448.069653] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1448.070991] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1448.071215] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.590s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1448.567904] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.568316] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1448.568361] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1449.417542] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1449.417820] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1504.419217] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1505.417628] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1505.417938] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1505.921254] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.921635] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1505.921686] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1505.921819] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1505.922763] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931cb1cc-8ca0-4c8b-96c8-f924034cc546 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.931399] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81193e4e-c205-4c83-8417-8575a3816a67 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.946321] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5460d945-36f5-4821-ad7a-40867c707702 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.952478] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1431f896-6d09-4e17-9ea5-3c2264f431fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1505.981015] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181309MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1505.981185] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1505.981417] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1506.998883] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1506.999154] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1507.011506] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d613cd-4e12-407e-9dcd-42200b76e2a3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.019234] env[63024]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28b8d926-61a8-4e68-9d89-20ffa784eb7b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.049272] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f5e726a-9b2a-4e58-989e-3d235fb88b0d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.055590] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd672e8-170e-4d47-b639-16fe4242f7b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1507.068043] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1507.571305] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1507.572650] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1507.572834] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 1.591s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1507.573050] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1507.573185] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Cleaning up deleted instances with incomplete migration {{(pid=63024) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11596}} [ 1508.416601] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1508.416865] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1508.416991] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1508.417128] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 1508.920212] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 1508.920434] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1508.920603] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1508.920748] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1508.920925] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1511.919237] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1511.919747] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1520.419719] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1520.420176] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Cleaning up deleted instances {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11558}} [ 1520.923445] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] There are 0 instances to clean {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11567}} [ 1541.471215] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "f90f35a2-f2ee-45e2-a9e4-afce50f29aa0" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1541.471944] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "f90f35a2-f2ee-45e2-a9e4-afce50f29aa0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1541.978018] env[63024]: DEBUG nova.compute.manager [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1542.499444] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.499908] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.533570] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1542.533873] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1542.539265] env[63024]: INFO nova.compute.claims [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1543.005854] env[63024]: DEBUG nova.compute.manager [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1543.407747] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "ccd80e20-9fc2-415a-a428-fcf85994c7f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.408013] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "ccd80e20-9fc2-415a-a428-fcf85994c7f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1543.534760] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1543.613272] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2752f6-f07f-4ce6-8fa0-7ed76b0ef415 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.621859] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0842b2-e448-4cde-b19e-4948ba450c46 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.660013] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0f7ee4-0f8f-47d8-acd2-8d43ff2169c3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.668605] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1acdddfb-2c82-44a2-bed0-30c46cb4fe36 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1543.683336] env[63024]: DEBUG nova.compute.provider_tree [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1543.911671] env[63024]: DEBUG nova.compute.manager [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1544.190241] env[63024]: DEBUG nova.scheduler.client.report [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1544.426560] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "b629b4f8-f79f-4361-b78c-8705a6888a9e" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.426807] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "b629b4f8-f79f-4361-b78c-8705a6888a9e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.450072] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1544.697654] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.164s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1544.699995] env[63024]: DEBUG nova.compute.manager [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1544.703098] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.168s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1544.707015] env[63024]: INFO nova.compute.claims [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1544.930303] env[63024]: DEBUG nova.compute.manager [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1545.214428] env[63024]: DEBUG nova.compute.utils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1545.215079] env[63024]: DEBUG nova.compute.manager [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1545.215352] env[63024]: DEBUG nova.network.neutron [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1545.460445] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1545.579018] env[63024]: DEBUG nova.policy [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9fbf54f1bffb4547906415722a5625ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3f84db03a9047f0bb937929cb979cf2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1545.726985] env[63024]: DEBUG nova.compute.manager [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1545.823703] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16c946dd-472e-4ddd-8304-afe81749c7b4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.831977] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-829d52c6-11a1-41e9-867b-9fbe7564dce0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.865019] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f51405-fbc1-4a36-9016-27d4befbd148 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.872112] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15a63fb1-d785-4f89-8bb7-17092f4e9488 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.885851] env[63024]: DEBUG nova.compute.provider_tree [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1546.358401] env[63024]: DEBUG nova.network.neutron [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Successfully created port: 9aa976e0-a2e8-4311-adcb-79d911535253 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1546.392094] env[63024]: DEBUG nova.scheduler.client.report [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1546.744780] env[63024]: DEBUG nova.compute.manager [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1546.786619] env[63024]: DEBUG nova.virt.hardware [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1546.786854] env[63024]: DEBUG nova.virt.hardware [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1546.787682] env[63024]: DEBUG nova.virt.hardware [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1546.787682] env[63024]: DEBUG nova.virt.hardware [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1546.787682] env[63024]: DEBUG nova.virt.hardware [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1546.787682] env[63024]: DEBUG nova.virt.hardware [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1546.787682] env[63024]: DEBUG nova.virt.hardware [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1546.787880] env[63024]: DEBUG nova.virt.hardware [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1546.788736] env[63024]: DEBUG nova.virt.hardware [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1546.792253] env[63024]: DEBUG nova.virt.hardware [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1546.792479] env[63024]: DEBUG nova.virt.hardware [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1546.793383] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4b571b-7259-4e25-a8a4-6b0d562ce804 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.805753] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ba71f6-dda3-49d8-9bbd-e2eb7cd8a2a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.829823] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f81cc27-2bae-4f05-9e0e-fdba6804e19b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.899965] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.197s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1546.900901] env[63024]: DEBUG nova.compute.manager [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1546.904563] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.455s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1546.906608] env[63024]: INFO nova.compute.claims [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1547.412303] env[63024]: DEBUG nova.compute.utils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1547.416027] env[63024]: DEBUG nova.compute.manager [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1547.416027] env[63024]: DEBUG nova.network.neutron [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1547.469532] env[63024]: DEBUG nova.policy [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54b7a5c8406e44e3a00cf903bc74e48d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99c4328f2c8c4139b4eace4b465e37e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1547.915992] env[63024]: DEBUG nova.compute.manager [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1548.011311] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4917ce93-f947-4a36-aa95-edb7ec67cff6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.021931] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7fd54fb-7413-4d55-8b5e-e18775c2b5d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.056445] env[63024]: DEBUG nova.network.neutron [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Successfully created port: 07391e4a-67fc-4ff7-8491-8ca4469c68e9 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1548.058975] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae0800e-0d5c-4252-bcf9-e0cceef6bc1d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.073417] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d5b83c-23b2-4978-b512-deafbbc779e9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.087305] env[63024]: DEBUG nova.compute.provider_tree [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1548.593592] env[63024]: DEBUG nova.scheduler.client.report [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1548.933668] env[63024]: DEBUG nova.compute.manager [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1548.967117] env[63024]: DEBUG nova.virt.hardware [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1548.967117] env[63024]: DEBUG nova.virt.hardware [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1548.967117] env[63024]: DEBUG nova.virt.hardware [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1548.967117] env[63024]: DEBUG nova.virt.hardware [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1548.967309] env[63024]: DEBUG nova.virt.hardware [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1548.967399] env[63024]: DEBUG nova.virt.hardware [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1548.967736] env[63024]: DEBUG nova.virt.hardware [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1548.968043] env[63024]: DEBUG nova.virt.hardware [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1548.968323] env[63024]: DEBUG nova.virt.hardware [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 
tempest-ImagesTestJSON-1301675684-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1548.968644] env[63024]: DEBUG nova.virt.hardware [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1548.969141] env[63024]: DEBUG nova.virt.hardware [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1548.972015] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b09855-9b88-4e46-96d0-bc6bc9b3f4b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1548.980213] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1bd2f13-c565-47be-a0c3-0bb5ccde9713 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1549.099194] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.195s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1549.100517] env[63024]: DEBUG nova.compute.manager [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1549.104170] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.644s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1549.108880] env[63024]: INFO nova.compute.claims [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1549.356038] env[63024]: DEBUG nova.network.neutron [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Successfully updated port: 9aa976e0-a2e8-4311-adcb-79d911535253 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1549.616407] env[63024]: DEBUG nova.compute.utils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1549.621110] env[63024]: DEBUG nova.compute.manager [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1549.621361] env[63024]: DEBUG nova.network.neutron [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1549.752091] env[63024]: DEBUG nova.policy [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3ab8b8a4c964062a5f379b46149de59', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5489a064ee1d44f0bd6c496f4775b9d6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1549.860628] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "refresh_cache-f90f35a2-f2ee-45e2-a9e4-afce50f29aa0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1549.860628] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquired lock "refresh_cache-f90f35a2-f2ee-45e2-a9e4-afce50f29aa0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1549.860628] env[63024]: DEBUG nova.network.neutron [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1550.128920] env[63024]: DEBUG nova.compute.manager [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1550.229264] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a8e798a-5478-48e7-915b-a27ba7c17b48 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.240396] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88f170d-51fb-403e-95f6-79af6de39001 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.277308] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1bf3da-771c-4142-a874-03fad10c9bd2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.286759] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae2e7c72-9903-4cea-93dc-36b910b537b3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1550.310992] env[63024]: DEBUG nova.compute.provider_tree [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1550.321858] env[63024]: DEBUG nova.network.neutron [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Successfully updated port: 07391e4a-67fc-4ff7-8491-8ca4469c68e9 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1550.445757] env[63024]: DEBUG nova.network.neutron [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1550.529105] env[63024]: DEBUG nova.compute.manager [req-7375b4a2-0e09-4fba-9a9b-8b4959f13b69 req-4b78a5b2-fc8e-407f-8500-7abebdb0536c service nova] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Received event network-vif-plugged-9aa976e0-a2e8-4311-adcb-79d911535253 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1550.529105] env[63024]: DEBUG oslo_concurrency.lockutils [req-7375b4a2-0e09-4fba-9a9b-8b4959f13b69 req-4b78a5b2-fc8e-407f-8500-7abebdb0536c service nova] Acquiring lock "f90f35a2-f2ee-45e2-a9e4-afce50f29aa0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1550.529105] env[63024]: DEBUG oslo_concurrency.lockutils [req-7375b4a2-0e09-4fba-9a9b-8b4959f13b69 req-4b78a5b2-fc8e-407f-8500-7abebdb0536c service nova] Lock "f90f35a2-f2ee-45e2-a9e4-afce50f29aa0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1550.529105] env[63024]: DEBUG oslo_concurrency.lockutils [req-7375b4a2-0e09-4fba-9a9b-8b4959f13b69 req-4b78a5b2-fc8e-407f-8500-7abebdb0536c service nova] Lock "f90f35a2-f2ee-45e2-a9e4-afce50f29aa0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1550.529105] env[63024]: DEBUG nova.compute.manager [req-7375b4a2-0e09-4fba-9a9b-8b4959f13b69 req-4b78a5b2-fc8e-407f-8500-7abebdb0536c service nova] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] No waiting events found dispatching network-vif-plugged-9aa976e0-a2e8-4311-adcb-79d911535253 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1550.530205] env[63024]: WARNING nova.compute.manager [req-7375b4a2-0e09-4fba-9a9b-8b4959f13b69 req-4b78a5b2-fc8e-407f-8500-7abebdb0536c service nova] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Received unexpected event network-vif-plugged-9aa976e0-a2e8-4311-adcb-79d911535253 for instance with vm_state building and task_state spawning. 
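The "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" DEBUG entries around compute_resources and the per-instance "-events" lock are emitted by oslo.concurrency's lockutils. A minimal sketch of that locking pattern, with hypothetical function and lock names rather than Nova's actual call sites:

    from oslo_concurrency import lockutils

    # Decorator form: serializes all callers in this process on one named
    # in-process lock; lockutils logs the acquire/wait/hold timings seen above.
    @lockutils.synchronized('compute_resources')
    def claim(instance_uuid):
        ...  # critical section, e.g. updating tracked resource usage

    # Explicit context-manager form, roughly the shape behind the
    # per-instance event lock taken while popping a pending external event.
    with lockutils.lock('instance-events'):
        ...  # inspect or pop a waiting event under the lock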
[ 1550.814374] env[63024]: DEBUG nova.scheduler.client.report [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1550.825995] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "refresh_cache-b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1550.826154] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "refresh_cache-b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1550.826298] env[63024]: DEBUG nova.network.neutron [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1550.952027] env[63024]: DEBUG nova.network.neutron [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Updating instance_info_cache with network_info: [{"id": "9aa976e0-a2e8-4311-adcb-79d911535253", "address": "fa:16:3e:37:9d:a9", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.20", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa976e0-a2", "ovs_interfaceid": "9aa976e0-a2e8-4311-adcb-79d911535253", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1551.140129] env[63024]: DEBUG nova.compute.manager [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 
tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1551.176154] env[63024]: DEBUG nova.virt.hardware [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1551.176434] env[63024]: DEBUG nova.virt.hardware [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1551.176561] env[63024]: DEBUG nova.virt.hardware [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1551.176736] env[63024]: DEBUG nova.virt.hardware [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1551.177680] env[63024]: DEBUG nova.virt.hardware [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1551.178070] env[63024]: DEBUG nova.virt.hardware [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1551.178740] env[63024]: DEBUG nova.virt.hardware [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1551.179268] env[63024]: DEBUG nova.virt.hardware [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1551.179505] env[63024]: DEBUG nova.virt.hardware [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1551.179710] env[63024]: DEBUG nova.virt.hardware [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1551.180279] env[63024]: DEBUG nova.virt.hardware [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1551.181415] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb5b0fae-7848-42ce-9a3e-ee7ba27fbd6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.191091] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76818da8-eb0b-42ec-8bf9-150471b567e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.206431] env[63024]: DEBUG nova.network.neutron [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Successfully created port: 223e2d99-de00-4474-8225-7fee75ac28d6 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1551.321696] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.218s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.322270] env[63024]: DEBUG nova.compute.manager [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1551.421922] env[63024]: DEBUG nova.network.neutron [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1551.460619] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Releasing lock "refresh_cache-f90f35a2-f2ee-45e2-a9e4-afce50f29aa0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1551.460619] env[63024]: DEBUG nova.compute.manager [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Instance network_info: |[{"id": "9aa976e0-a2e8-4311-adcb-79d911535253", "address": "fa:16:3e:37:9d:a9", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.20", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa976e0-a2", "ovs_interfaceid": "9aa976e0-a2e8-4311-adcb-79d911535253", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1551.460822] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:9d:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9aa976e0-a2e8-4311-adcb-79d911535253', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1551.477107] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Creating folder: OpenStack. Parent ref: group-v4. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1551.481764] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4597598-17f6-4ca7-b954-6a3bcf9c6f69 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.498583] env[63024]: DEBUG nova.compute.manager [req-82105e17-c977-4138-9fbe-4a8014f1a615 req-dc071e6f-7680-43f8-b14f-0593b2f593b8 service nova] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Received event network-vif-plugged-07391e4a-67fc-4ff7-8491-8ca4469c68e9 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1551.498901] env[63024]: DEBUG oslo_concurrency.lockutils [req-82105e17-c977-4138-9fbe-4a8014f1a615 req-dc071e6f-7680-43f8-b14f-0593b2f593b8 service nova] Acquiring lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1551.499229] env[63024]: DEBUG oslo_concurrency.lockutils [req-82105e17-c977-4138-9fbe-4a8014f1a615 req-dc071e6f-7680-43f8-b14f-0593b2f593b8 service nova] Lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1551.499318] env[63024]: DEBUG oslo_concurrency.lockutils [req-82105e17-c977-4138-9fbe-4a8014f1a615 req-dc071e6f-7680-43f8-b14f-0593b2f593b8 service nova] Lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1551.499470] env[63024]: DEBUG nova.compute.manager [req-82105e17-c977-4138-9fbe-4a8014f1a615 req-dc071e6f-7680-43f8-b14f-0593b2f593b8 service nova] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] No waiting events found dispatching network-vif-plugged-07391e4a-67fc-4ff7-8491-8ca4469c68e9 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1551.499750] env[63024]: WARNING nova.compute.manager [req-82105e17-c977-4138-9fbe-4a8014f1a615 req-dc071e6f-7680-43f8-b14f-0593b2f593b8 service nova] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Received unexpected event network-vif-plugged-07391e4a-67fc-4ff7-8491-8ca4469c68e9 for instance with vm_state building and task_state spawning. [ 1551.501621] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Created folder: OpenStack in parent group-v4. [ 1551.501840] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Creating folder: Project (c3f84db03a9047f0bb937929cb979cf2). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1551.502311] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7294c4f8-da13-4dd9-bd38-d552f524d274 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.514138] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Created folder: Project (c3f84db03a9047f0bb937929cb979cf2) in parent group-v401959. [ 1551.514138] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Creating folder: Instances. Parent ref: group-v401960. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1551.516098] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-68cf06ee-9a88-40b8-8b6b-e1a1ac80b30b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.523774] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Created folder: Instances in parent group-v401960. [ 1551.525093] env[63024]: DEBUG oslo.service.loopingcall [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1551.525093] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1551.525093] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a202959c-4a6c-4472-b752-06c58755fc24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1551.545859] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1551.545859] env[63024]: value = "task-1950314" [ 1551.545859] env[63024]: _type = "Task" [ 1551.545859] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1551.562099] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950314, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1551.827363] env[63024]: DEBUG nova.compute.utils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1551.828769] env[63024]: DEBUG nova.compute.manager [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1551.828957] env[63024]: DEBUG nova.network.neutron [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1552.058717] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950314, 'name': CreateVM_Task, 'duration_secs': 0.446186} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1552.058902] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1552.078017] env[63024]: DEBUG nova.policy [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67d3796d3d5d4eebba5ecf8e611190c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '751ed00ef16a4cca832e3c78731c9379', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1552.083494] env[63024]: DEBUG oslo_vmware.service [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2df7f32-9f3b-4da8-9368-a3695032d5db {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.089248] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.089248] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.090313] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1552.090313] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11d5d143-fab8-48d3-a942-a5c46dcce0a8 {{(pid=63024) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.095835] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1552.095835] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bac3e2-ea16-4e90-f8eb-f8212237ca4a" [ 1552.095835] env[63024]: _type = "Task" [ 1552.095835] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.103804] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bac3e2-ea16-4e90-f8eb-f8212237ca4a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.278923] env[63024]: DEBUG nova.network.neutron [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Updating instance_info_cache with network_info: [{"id": "07391e4a-67fc-4ff7-8491-8ca4469c68e9", "address": "fa:16:3e:54:a1:06", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07391e4a-67", "ovs_interfaceid": "07391e4a-67fc-4ff7-8491-8ca4469c68e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1552.332835] env[63024]: DEBUG nova.compute.manager [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1552.612808] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.612808] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1552.612808] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1552.612808] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1552.613613] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1552.613868] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-85e75c65-cbe2-456e-adfd-e602bb2ec54c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.633364] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1552.633547] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1552.634472] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1ab18f-aaf6-4b7e-aa16-fd4dd2419ba6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.644147] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1472c85-eb88-4d11-b70e-7f7730ac057d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.651325] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1552.651325] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fad050-bdad-5007-87b3-debe78fc7d1f" [ 1552.651325] env[63024]: _type = "Task" [ 1552.651325] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.660893] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fad050-bdad-5007-87b3-debe78fc7d1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1552.783326] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "refresh_cache-b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1552.783657] env[63024]: DEBUG nova.compute.manager [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Instance network_info: |[{"id": "07391e4a-67fc-4ff7-8491-8ca4469c68e9", "address": "fa:16:3e:54:a1:06", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07391e4a-67", "ovs_interfaceid": "07391e4a-67fc-4ff7-8491-8ca4469c68e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1552.784077] env[63024]: DEBUG 
nova.virt.vmwareapi.vmops [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:a1:06', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '07391e4a-67fc-4ff7-8491-8ca4469c68e9', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1552.794187] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating folder: Project (99c4328f2c8c4139b4eace4b465e37e3). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1552.794973] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e404c4f-fc05-47e4-ae0d-58213c9f9acf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.811029] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Created folder: Project (99c4328f2c8c4139b4eace4b465e37e3) in parent group-v401959. [ 1552.811029] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating folder: Instances. Parent ref: group-v401963. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1552.811029] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c52f3899-e30f-4e62-bb83-afdcf97fc0ed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.820255] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Created folder: Instances in parent group-v401963. [ 1552.820255] env[63024]: DEBUG oslo.service.loopingcall [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1552.820255] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1552.820255] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed3f6562-ddcb-4293-aa4b-27a5e7774a1b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1552.848772] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1552.848772] env[63024]: value = "task-1950317" [ 1552.848772] env[63024]: _type = "Task" [ 1552.848772] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1552.858837] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950317, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.169093] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Preparing fetch location {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1553.169443] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Creating directory with path [datastore1] vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1553.169684] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d67ae9f9-7374-45ce-9960-07458b98ae4a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.200434] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Created directory with path [datastore1] vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1553.200658] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Fetch image to [datastore1] vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835/tmp-sparse.vmdk {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1553.200748] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Downloading image file data 2646ca61-612e-4bc3-97f7-ee492c048835 to [datastore1] vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835/tmp-sparse.vmdk on the data store datastore1 {{(pid=63024) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1553.201669] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c820bb-e3aa-4751-9e87-2159329987fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.210079] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43e5adb2-ee2e-4147-8f7e-82bf0d6928ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.224038] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5241459-6e4b-4aff-824d-07e1e05f36f2 
{{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.262956] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9838aec-82d4-4789-a14a-a88170062c2c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.267239] env[63024]: DEBUG nova.network.neutron [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Successfully created port: fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1553.274165] env[63024]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a7f3cd4b-8210-462a-8b25-78663b648a21 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.348348] env[63024]: DEBUG nova.compute.manager [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1553.364038] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Downloading image file data 2646ca61-612e-4bc3-97f7-ee492c048835 to the data store datastore1 {{(pid=63024) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1553.384720] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950317, 'name': CreateVM_Task, 'duration_secs': 0.452451} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1553.384807] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1553.385686] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.385930] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.386366] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1553.386558] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ea62f9a-ce1c-4e3b-b62b-1b1d3cbf5634 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.394218] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1553.394218] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527b5978-3cb8-9b7c-7d86-ea9f32c5c9fd" [ 1553.394218] env[63024]: _type = "Task" [ 1553.394218] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1553.407352] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527b5978-3cb8-9b7c-7d86-ea9f32c5c9fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1553.411988] env[63024]: DEBUG nova.virt.hardware [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1553.411988] env[63024]: DEBUG nova.virt.hardware [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1553.411988] env[63024]: DEBUG nova.virt.hardware [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1553.412185] env[63024]: DEBUG nova.virt.hardware [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1553.412242] env[63024]: DEBUG nova.virt.hardware [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1553.412378] env[63024]: DEBUG nova.virt.hardware [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1553.412577] env[63024]: DEBUG nova.virt.hardware [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1553.412723] env[63024]: DEBUG nova.virt.hardware [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1553.414257] env[63024]: DEBUG nova.virt.hardware [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1553.414257] env[63024]: DEBUG nova.virt.hardware [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1553.414257] env[63024]: DEBUG nova.virt.hardware [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1553.415748] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f324ea-804e-4e0a-a4a7-205c9571472d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.424617] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b495ade3-a4f1-49a9-b200-9f924259c177 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1553.483270] env[63024]: DEBUG oslo_vmware.rw_handles [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=63024) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1553.708177] env[63024]: DEBUG nova.compute.manager [req-c2823af2-2b5e-473e-b99a-001c31b974df req-0f038852-8cb7-4799-811e-50f57fbb9113 service nova] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Received event network-changed-9aa976e0-a2e8-4311-adcb-79d911535253 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1553.708398] env[63024]: DEBUG nova.compute.manager [req-c2823af2-2b5e-473e-b99a-001c31b974df req-0f038852-8cb7-4799-811e-50f57fbb9113 service nova] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Refreshing instance network info cache due to event network-changed-9aa976e0-a2e8-4311-adcb-79d911535253. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1553.708587] env[63024]: DEBUG oslo_concurrency.lockutils [req-c2823af2-2b5e-473e-b99a-001c31b974df req-0f038852-8cb7-4799-811e-50f57fbb9113 service nova] Acquiring lock "refresh_cache-f90f35a2-f2ee-45e2-a9e4-afce50f29aa0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1553.708731] env[63024]: DEBUG oslo_concurrency.lockutils [req-c2823af2-2b5e-473e-b99a-001c31b974df req-0f038852-8cb7-4799-811e-50f57fbb9113 service nova] Acquired lock "refresh_cache-f90f35a2-f2ee-45e2-a9e4-afce50f29aa0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1553.709761] env[63024]: DEBUG nova.network.neutron [req-c2823af2-2b5e-473e-b99a-001c31b974df req-0f038852-8cb7-4799-811e-50f57fbb9113 service nova] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Refreshing network info cache for port 9aa976e0-a2e8-4311-adcb-79d911535253 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1553.916101] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1553.916101] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1553.916477] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.188180] env[63024]: DEBUG oslo_vmware.rw_handles [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Completed reading data from the image iterator. {{(pid=63024) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1554.188910] env[63024]: DEBUG oslo_vmware.rw_handles [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1554.327233] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Downloaded image file data 2646ca61-612e-4bc3-97f7-ee492c048835 to vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835/tmp-sparse.vmdk on the data store datastore1 {{(pid=63024) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1554.329265] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Caching image {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1554.329503] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Copying Virtual Disk [datastore1] vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835/tmp-sparse.vmdk to [datastore1] vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1554.329779] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-16b1fb0b-ef52-471b-8afc-317e55b278ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1554.338759] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1554.338759] env[63024]: value = "task-1950318" [ 1554.338759] env[63024]: _type = "Task" [ 1554.338759] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1554.352691] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950318, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1554.678374] env[63024]: DEBUG nova.compute.manager [req-4fc09cf3-2201-41a8-8c96-ac3d0cb0db6d req-7541cdbc-0214-43f3-9717-43893a81ad56 service nova] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Received event network-changed-07391e4a-67fc-4ff7-8491-8ca4469c68e9 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1554.678374] env[63024]: DEBUG nova.compute.manager [req-4fc09cf3-2201-41a8-8c96-ac3d0cb0db6d req-7541cdbc-0214-43f3-9717-43893a81ad56 service nova] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Refreshing instance network info cache due to event network-changed-07391e4a-67fc-4ff7-8491-8ca4469c68e9. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1554.678374] env[63024]: DEBUG oslo_concurrency.lockutils [req-4fc09cf3-2201-41a8-8c96-ac3d0cb0db6d req-7541cdbc-0214-43f3-9717-43893a81ad56 service nova] Acquiring lock "refresh_cache-b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1554.678374] env[63024]: DEBUG oslo_concurrency.lockutils [req-4fc09cf3-2201-41a8-8c96-ac3d0cb0db6d req-7541cdbc-0214-43f3-9717-43893a81ad56 service nova] Acquired lock "refresh_cache-b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1554.679252] env[63024]: DEBUG nova.network.neutron [req-4fc09cf3-2201-41a8-8c96-ac3d0cb0db6d req-7541cdbc-0214-43f3-9717-43893a81ad56 service nova] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Refreshing network info cache for port 07391e4a-67fc-4ff7-8491-8ca4469c68e9 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1554.812719] env[63024]: DEBUG nova.network.neutron [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Successfully updated port: 223e2d99-de00-4474-8225-7fee75ac28d6 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1554.851862] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950318, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.318555] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "refresh_cache-ccd80e20-9fc2-415a-a428-fcf85994c7f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1555.318698] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired lock "refresh_cache-ccd80e20-9fc2-415a-a428-fcf85994c7f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1555.318843] env[63024]: DEBUG nova.network.neutron [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1555.355856] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950318, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.679726} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.356204] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Copied Virtual Disk [datastore1] vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835/tmp-sparse.vmdk to [datastore1] vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1555.356291] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Deleting the datastore file [datastore1] vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835/tmp-sparse.vmdk {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1555.356790] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bb75155b-a00f-4952-b773-ea3d0d24f7f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.370170] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1555.370170] env[63024]: value = "task-1950319" [ 1555.370170] env[63024]: _type = "Task" [ 1555.370170] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.377359] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950319, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.496832] env[63024]: DEBUG nova.network.neutron [req-c2823af2-2b5e-473e-b99a-001c31b974df req-0f038852-8cb7-4799-811e-50f57fbb9113 service nova] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Updated VIF entry in instance network info cache for port 9aa976e0-a2e8-4311-adcb-79d911535253. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1555.496832] env[63024]: DEBUG nova.network.neutron [req-c2823af2-2b5e-473e-b99a-001c31b974df req-0f038852-8cb7-4799-811e-50f57fbb9113 service nova] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Updating instance_info_cache with network_info: [{"id": "9aa976e0-a2e8-4311-adcb-79d911535253", "address": "fa:16:3e:37:9d:a9", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.20", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9aa976e0-a2", "ovs_interfaceid": "9aa976e0-a2e8-4311-adcb-79d911535253", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.605919] env[63024]: DEBUG nova.network.neutron [req-4fc09cf3-2201-41a8-8c96-ac3d0cb0db6d req-7541cdbc-0214-43f3-9717-43893a81ad56 service nova] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Updated VIF entry in instance network info cache for port 07391e4a-67fc-4ff7-8491-8ca4469c68e9. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1555.606670] env[63024]: DEBUG nova.network.neutron [req-4fc09cf3-2201-41a8-8c96-ac3d0cb0db6d req-7541cdbc-0214-43f3-9717-43893a81ad56 service nova] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Updating instance_info_cache with network_info: [{"id": "07391e4a-67fc-4ff7-8491-8ca4469c68e9", "address": "fa:16:3e:54:a1:06", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07391e4a-67", "ovs_interfaceid": "07391e4a-67fc-4ff7-8491-8ca4469c68e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1555.881213] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950319, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.023423} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1555.882153] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1555.882318] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Moving file from [datastore1] vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3/2646ca61-612e-4bc3-97f7-ee492c048835 to [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835. {{(pid=63024) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 1555.882576] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-95e0c6e6-fafd-484c-b915-40803e234956 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1555.891623] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1555.891623] env[63024]: value = "task-1950320" [ 1555.891623] env[63024]: _type = "Task" [ 1555.891623] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1555.901714] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950320, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1555.937939] env[63024]: DEBUG nova.network.neutron [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1555.999435] env[63024]: DEBUG oslo_concurrency.lockutils [req-c2823af2-2b5e-473e-b99a-001c31b974df req-0f038852-8cb7-4799-811e-50f57fbb9113 service nova] Releasing lock "refresh_cache-f90f35a2-f2ee-45e2-a9e4-afce50f29aa0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.112874] env[63024]: DEBUG oslo_concurrency.lockutils [req-4fc09cf3-2201-41a8-8c96-ac3d0cb0db6d req-7541cdbc-0214-43f3-9717-43893a81ad56 service nova] Releasing lock "refresh_cache-b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.371366] env[63024]: DEBUG nova.network.neutron [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Updating instance_info_cache with network_info: [{"id": "223e2d99-de00-4474-8225-7fee75ac28d6", "address": "fa:16:3e:83:4e:39", "network": {"id": "f42f7ff4-b2ef-45fd-8230-5f16271d5808", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-607041553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5489a064ee1d44f0bd6c496f4775b9d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap223e2d99-de", "ovs_interfaceid": "223e2d99-de00-4474-8225-7fee75ac28d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1556.410514] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950320, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.033135} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.410514] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] File moved {{(pid=63024) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 1556.410514] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Cleaning up location [datastore1] vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1556.410514] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Deleting the datastore file [datastore1] vmware_temp/8d443573-4952-4be8-a0ec-3d2da10bf9d3 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1556.410514] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55d43f8b-a249-4b51-be7f-d4ff03a915d4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.420959] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1556.420959] env[63024]: value = "task-1950321" [ 1556.420959] env[63024]: _type = "Task" [ 1556.420959] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.435098] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950321, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.845763] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1556.846011] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1556.877240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Releasing lock "refresh_cache-ccd80e20-9fc2-415a-a428-fcf85994c7f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1556.877240] env[63024]: DEBUG nova.compute.manager [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Instance network_info: |[{"id": "223e2d99-de00-4474-8225-7fee75ac28d6", "address": "fa:16:3e:83:4e:39", "network": {"id": "f42f7ff4-b2ef-45fd-8230-5f16271d5808", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-607041553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5489a064ee1d44f0bd6c496f4775b9d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap223e2d99-de", "ovs_interfaceid": "223e2d99-de00-4474-8225-7fee75ac28d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1556.877380] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:4e:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56834f67-27a8-43dc-bbc6-a74aaa08959b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '223e2d99-de00-4474-8225-7fee75ac28d6', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1556.885467] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Creating folder: Project (5489a064ee1d44f0bd6c496f4775b9d6). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1556.886331] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3716a5c6-ba5f-475f-9c4c-d4852f58e170 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.897028] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Created folder: Project (5489a064ee1d44f0bd6c496f4775b9d6) in parent group-v401959. [ 1556.897238] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Creating folder: Instances. Parent ref: group-v401966. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1556.897472] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12e60388-51ab-4075-9b72-5ee0652b4ea2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.907091] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Created folder: Instances in parent group-v401966. [ 1556.907091] env[63024]: DEBUG oslo.service.loopingcall [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1556.907172] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1556.907458] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c87d59a-d8f4-45e4-9c03-1d201565ad4e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.931797] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950321, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.024297} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1556.933254] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1556.936135] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1556.936135] env[63024]: value = "task-1950324" [ 1556.936135] env[63024]: _type = "Task" [ 1556.936135] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.936135] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37fac9d4-4840-4a27-b6d7-ea6de7101c00 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1556.942925] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1556.942925] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521475e0-e035-56cb-aedc-2bc008c96314" [ 1556.942925] env[63024]: _type = "Task" [ 1556.942925] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1556.946494] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950324, 'name': CreateVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.954943] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521475e0-e035-56cb-aedc-2bc008c96314, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1556.966940] env[63024]: DEBUG nova.network.neutron [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Successfully updated port: fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1557.354292] env[63024]: DEBUG nova.compute.manager [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1557.448042] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950324, 'name': CreateVM_Task, 'duration_secs': 0.500965} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.451195] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1557.451910] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.452085] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.452393] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1557.453135] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a0080db-157d-41a8-89e3-2ab693eaafbb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.460445] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521475e0-e035-56cb-aedc-2bc008c96314, 'name': SearchDatastore_Task, 'duration_secs': 0.015901} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.460643] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.460935] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] f90f35a2-f2ee-45e2-a9e4-afce50f29aa0/f90f35a2-f2ee-45e2-a9e4-afce50f29aa0.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1557.461238] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.461459] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1557.461809] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2763e5ac-6487-4c84-a6e7-6211c86748e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.464760] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8979b44b-834a-425c-993f-32f17ee21c6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.466686] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1557.466686] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52629d06-0b3c-585a-8f7c-a45a6b01c60d" [ 1557.466686] env[63024]: _type = "Task" [ 1557.466686] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.476627] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.476791] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquired lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1557.477580] env[63024]: DEBUG nova.network.neutron [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1557.479845] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1557.479845] env[63024]: value = "task-1950325" [ 1557.479845] env[63024]: _type = "Task" [ 1557.479845] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.480113] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1557.480232] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1557.481992] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1897863e-d090-433a-b052-2f437f056a77 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.499168] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1557.499168] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b6a624-9982-09bc-ae0e-e60ddce2a912" [ 1557.499168] env[63024]: _type = "Task" [ 1557.499168] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.499793] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52629d06-0b3c-585a-8f7c-a45a6b01c60d, 'name': SearchDatastore_Task, 'duration_secs': 0.009199} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.503366] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1557.503718] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1557.503978] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1557.504244] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.515403] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b6a624-9982-09bc-ae0e-e60ddce2a912, 'name': SearchDatastore_Task, 'duration_secs': 0.00831} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.520218] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e69199e6-b73e-4ff3-989a-c3fa2f80be3b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1557.525987] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1557.525987] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52013434-3e61-b25e-0212-84d93a00e331" [ 1557.525987] env[63024]: _type = "Task" [ 1557.525987] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1557.534931] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52013434-3e61-b25e-0212-84d93a00e331, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1557.887078] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1557.887377] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1557.888942] env[63024]: INFO nova.compute.claims [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1557.995300] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483086} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1557.995675] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] f90f35a2-f2ee-45e2-a9e4-afce50f29aa0/f90f35a2-f2ee-45e2-a9e4-afce50f29aa0.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1557.995875] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1557.996112] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fbd65ea9-d54d-4554-8bb8-a7a6b9524289 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.006898] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1558.006898] env[63024]: value = "task-1950326" [ 1558.006898] env[63024]: _type = "Task" [ 1558.006898] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.016350] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950326, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.039836] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52013434-3e61-b25e-0212-84d93a00e331, 'name': SearchDatastore_Task, 'duration_secs': 0.008956} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.040335] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.040711] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b0b4d94c-cd5c-4452-baa6-9aeec46b43ad/b0b4d94c-cd5c-4452-baa6-9aeec46b43ad.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1558.041171] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1558.041510] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1558.041875] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd668aa0-4a19-43d8-9536-8e9e678c68d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.044522] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-782a9088-9e03-4b3b-921c-b5eaa087f7dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.055113] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1558.055113] env[63024]: value = "task-1950327" [ 1558.055113] env[63024]: _type = "Task" [ 1558.055113] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.057167] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1558.057167] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1558.061142] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17a31e12-fdc9-414c-a545-17947383ca74 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.073064] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950327, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.073408] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1558.073408] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c5fa56-f759-83f6-2e83-0cc85e88d0eb" [ 1558.073408] env[63024]: _type = "Task" [ 1558.073408] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.080310] env[63024]: DEBUG nova.compute.manager [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Received event network-vif-plugged-223e2d99-de00-4474-8225-7fee75ac28d6 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1558.080371] env[63024]: DEBUG oslo_concurrency.lockutils [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] Acquiring lock "ccd80e20-9fc2-415a-a428-fcf85994c7f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.081138] env[63024]: DEBUG oslo_concurrency.lockutils [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] Lock "ccd80e20-9fc2-415a-a428-fcf85994c7f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.081138] env[63024]: DEBUG oslo_concurrency.lockutils [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] Lock "ccd80e20-9fc2-415a-a428-fcf85994c7f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.081138] env[63024]: DEBUG nova.compute.manager [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] No waiting events found dispatching network-vif-plugged-223e2d99-de00-4474-8225-7fee75ac28d6 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1558.081138] env[63024]: WARNING nova.compute.manager [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Received unexpected event network-vif-plugged-223e2d99-de00-4474-8225-7fee75ac28d6 for instance with vm_state building and task_state spawning. 
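The WARNING just above traces Nova's external-event dispatch: Neutron reports network-vif-plugged, the compute service pops any registered waiter for that event under a per-instance lock, and logs "Received unexpected event" when nothing was waiting (here the instance is still building, so the VIF-plug waiter has not been registered yet). The following is a minimal, hypothetical sketch of that pattern only, not the actual Nova implementation; the class and method names (InstanceEvents, pop_instance_event, external_instance_event) are modeled on the call sites shown in the log.

    # Illustrative sketch (assumption: simplified stand-in for Nova's
    # InstanceEvents machinery, not the real code).
    import logging
    import threading

    LOG = logging.getLogger(__name__)


    class InstanceEvents:
        """Tracks callers waiting for external events, keyed by instance UUID."""

        def __init__(self):
            self._events = {}          # {instance_uuid: {event_key: threading.Event}}
            self._lock = threading.Lock()

        def prepare_for_event(self, instance_uuid, event_key):
            # A spawning thread registers interest before the VIF is plugged.
            waiter = threading.Event()
            with self._lock:
                self._events.setdefault(instance_uuid, {})[event_key] = waiter
            return waiter

        def pop_instance_event(self, instance_uuid, event_key):
            # The event-receiving thread pops the waiter, if any, under the lock.
            with self._lock:
                return self._events.get(instance_uuid, {}).pop(event_key, None)


    def external_instance_event(events, instance_uuid, event_key):
        waiter = events.pop_instance_event(instance_uuid, event_key)
        if waiter is None:
            # Mirrors the "Received unexpected event ..." WARNING in the log:
            # nothing was waiting for this event, so it is only recorded.
            LOG.warning("Received unexpected event %s for instance %s",
                        event_key, instance_uuid)
        else:
            waiter.set()


    if __name__ == "__main__":
        logging.basicConfig(level=logging.DEBUG)
        events = InstanceEvents()
        # No waiter was registered, so this logs the warning, as in the excerpt above.
        external_instance_event(events,
                                "ccd80e20-9fc2-415a-a428-fcf85994c7f8",
                                "network-vif-plugged-223e2d99")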
[ 1558.081316] env[63024]: DEBUG nova.compute.manager [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Received event network-changed-223e2d99-de00-4474-8225-7fee75ac28d6 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1558.081398] env[63024]: DEBUG nova.compute.manager [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Refreshing instance network info cache due to event network-changed-223e2d99-de00-4474-8225-7fee75ac28d6. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1558.081570] env[63024]: DEBUG oslo_concurrency.lockutils [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] Acquiring lock "refresh_cache-ccd80e20-9fc2-415a-a428-fcf85994c7f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1558.081697] env[63024]: DEBUG oslo_concurrency.lockutils [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] Acquired lock "refresh_cache-ccd80e20-9fc2-415a-a428-fcf85994c7f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1558.082687] env[63024]: DEBUG nova.network.neutron [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Refreshing network info cache for port 223e2d99-de00-4474-8225-7fee75ac28d6 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1558.089299] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c5fa56-f759-83f6-2e83-0cc85e88d0eb, 'name': SearchDatastore_Task, 'duration_secs': 0.01115} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.089946] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-15f95317-90df-4694-83d9-fd6282970d19 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.098109] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1558.098109] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b2bf71-6d0d-3e6e-f2a5-c684b97e5735" [ 1558.098109] env[63024]: _type = "Task" [ 1558.098109] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.107969] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b2bf71-6d0d-3e6e-f2a5-c684b97e5735, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.122888] env[63024]: DEBUG nova.network.neutron [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1558.526772] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950326, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077573} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.528616] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1558.531233] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cd56e63-69eb-4d09-86b0-dca085a4d7b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.564808] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] f90f35a2-f2ee-45e2-a9e4-afce50f29aa0/f90f35a2-f2ee-45e2-a9e4-afce50f29aa0.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1558.565040] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2d80d38-9e90-49f6-91d3-1550a16cb97a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.596846] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950327, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526587} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.598260] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b0b4d94c-cd5c-4452-baa6-9aeec46b43ad/b0b4d94c-cd5c-4452-baa6-9aeec46b43ad.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1558.598512] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1558.598802] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1558.598802] env[63024]: value = "task-1950328" [ 1558.598802] env[63024]: _type = "Task" [ 1558.598802] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.599259] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-380a4d0d-8428-4f25-91b7-29938b1ea066 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.618831] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b2bf71-6d0d-3e6e-f2a5-c684b97e5735, 'name': SearchDatastore_Task, 'duration_secs': 0.012148} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1558.621772] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1558.622068] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] ccd80e20-9fc2-415a-a428-fcf85994c7f8/ccd80e20-9fc2-415a-a428-fcf85994c7f8.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1558.622460] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1558.622460] env[63024]: value = "task-1950329" [ 1558.622460] env[63024]: _type = "Task" [ 1558.622460] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.622803] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8548717-1cf3-4778-abf8-ca0fc0c61ea2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1558.633815] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950329, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.635211] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1558.635211] env[63024]: value = "task-1950330" [ 1558.635211] env[63024]: _type = "Task" [ 1558.635211] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1558.643723] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950330, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1558.834792] env[63024]: DEBUG nova.compute.manager [req-bad7edd9-cb58-4a1d-bdc6-a810a479cbba req-54e090b0-0b04-4c26-be99-db1b5c7f561a service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Received event network-vif-plugged-fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1558.834792] env[63024]: DEBUG oslo_concurrency.lockutils [req-bad7edd9-cb58-4a1d-bdc6-a810a479cbba req-54e090b0-0b04-4c26-be99-db1b5c7f561a service nova] Acquiring lock "b629b4f8-f79f-4361-b78c-8705a6888a9e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1558.834792] env[63024]: DEBUG oslo_concurrency.lockutils [req-bad7edd9-cb58-4a1d-bdc6-a810a479cbba req-54e090b0-0b04-4c26-be99-db1b5c7f561a service nova] Lock "b629b4f8-f79f-4361-b78c-8705a6888a9e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1558.834792] env[63024]: DEBUG oslo_concurrency.lockutils [req-bad7edd9-cb58-4a1d-bdc6-a810a479cbba req-54e090b0-0b04-4c26-be99-db1b5c7f561a service nova] Lock "b629b4f8-f79f-4361-b78c-8705a6888a9e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1558.834792] env[63024]: DEBUG nova.compute.manager [req-bad7edd9-cb58-4a1d-bdc6-a810a479cbba req-54e090b0-0b04-4c26-be99-db1b5c7f561a service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] No waiting events found dispatching network-vif-plugged-fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1558.835120] env[63024]: WARNING 
nova.compute.manager [req-bad7edd9-cb58-4a1d-bdc6-a810a479cbba req-54e090b0-0b04-4c26-be99-db1b5c7f561a service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Received unexpected event network-vif-plugged-fe1aa30b-c99e-4641-9d91-c99d20670de0 for instance with vm_state building and task_state spawning. [ 1558.893474] env[63024]: DEBUG nova.network.neutron [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updating instance_info_cache with network_info: [{"id": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "address": "fa:16:3e:ec:ef:e1", "network": {"id": "dab57617-8c96-4c9c-a117-05fd2262c951", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1124018667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751ed00ef16a4cca832e3c78731c9379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe1aa30b-c9", "ovs_interfaceid": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.005364] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Acquiring lock "22ef5bae-f7bc-43c7-9d77-1b4547e83b24" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1559.005364] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lock "22ef5bae-f7bc-43c7-9d77-1b4547e83b24" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1559.117298] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950328, 'name': ReconfigVM_Task, 'duration_secs': 0.35022} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.117784] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Reconfigured VM instance instance-00000001 to attach disk [datastore1] f90f35a2-f2ee-45e2-a9e4-afce50f29aa0/f90f35a2-f2ee-45e2-a9e4-afce50f29aa0.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1559.118677] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9930bcf7-7e50-4362-9e54-0383d2242a40 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.126797] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1559.126797] env[63024]: value = "task-1950331" [ 1559.126797] env[63024]: _type = "Task" [ 1559.126797] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.157384] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950329, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072497} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.162766] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1559.162926] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950331, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.163922] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e4f1969-cec9-4b1f-a7ce-5ecc7a4e591e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.171473] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950330, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.196122] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] b0b4d94c-cd5c-4452-baa6-9aeec46b43ad/b0b4d94c-cd5c-4452-baa6-9aeec46b43ad.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1559.197210] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-62094049-a3c0-4c93-b619-f5fb7c44b105 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.217830] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793e745f-5943-43dd-a567-e2beed3f2644 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.230935] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e48deb3-7426-4c20-8e73-87727149740c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.236298] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1559.236298] env[63024]: value = "task-1950332" [ 1559.236298] env[63024]: _type = "Task" [ 1559.236298] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.270304] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63563129-ab55-4068-b9ef-59dafd3d036c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.274567] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950332, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.280111] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae9967a-23e7-41b6-a0b5-2c5174434a58 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.295671] env[63024]: DEBUG nova.compute.provider_tree [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1559.398279] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Releasing lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.398279] env[63024]: DEBUG nova.compute.manager [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Instance network_info: |[{"id": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "address": "fa:16:3e:ec:ef:e1", "network": {"id": "dab57617-8c96-4c9c-a117-05fd2262c951", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1124018667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751ed00ef16a4cca832e3c78731c9379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe1aa30b-c9", "ovs_interfaceid": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1559.398628] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:ef:e1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13d625c9-77ec-4edb-a56b-9f37a314cc39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fe1aa30b-c99e-4641-9d91-c99d20670de0', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1559.404657] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 
tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Creating folder: Project (751ed00ef16a4cca832e3c78731c9379). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1559.405334] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7cc22a49-f511-4e3d-809a-5068025e8438 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.418227] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Created folder: Project (751ed00ef16a4cca832e3c78731c9379) in parent group-v401959. [ 1559.418227] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Creating folder: Instances. Parent ref: group-v401969. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1559.418227] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-817b657b-b814-46b9-a60a-6b33e4ae0bbf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.426995] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Created folder: Instances in parent group-v401969. [ 1559.427368] env[63024]: DEBUG oslo.service.loopingcall [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1559.427578] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1559.427787] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-943bf223-1600-4d0c-9568-96080a0c669c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.448392] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1559.448392] env[63024]: value = "task-1950335" [ 1559.448392] env[63024]: _type = "Task" [ 1559.448392] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.456388] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950335, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.459904] env[63024]: DEBUG nova.network.neutron [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Updated VIF entry in instance network info cache for port 223e2d99-de00-4474-8225-7fee75ac28d6. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1559.460149] env[63024]: DEBUG nova.network.neutron [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Updating instance_info_cache with network_info: [{"id": "223e2d99-de00-4474-8225-7fee75ac28d6", "address": "fa:16:3e:83:4e:39", "network": {"id": "f42f7ff4-b2ef-45fd-8230-5f16271d5808", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-607041553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5489a064ee1d44f0bd6c496f4775b9d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap223e2d99-de", "ovs_interfaceid": "223e2d99-de00-4474-8225-7fee75ac28d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1559.513703] env[63024]: DEBUG nova.compute.manager [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1559.643508] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950331, 'name': Rename_Task, 'duration_secs': 0.139241} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.643823] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1559.644035] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c6ce930c-281e-4de4-a4b2-451896052a7b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.651218] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1559.651218] env[63024]: value = "task-1950336" [ 1559.651218] env[63024]: _type = "Task" [ 1559.651218] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.657667] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950330, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.549611} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.658264] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] ccd80e20-9fc2-415a-a428-fcf85994c7f8/ccd80e20-9fc2-415a-a428-fcf85994c7f8.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1559.658469] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1559.658691] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bdd2eeb8-d990-43b1-a1a8-cd968251ec1f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.663573] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950336, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.667984] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1559.667984] env[63024]: value = "task-1950337" [ 1559.667984] env[63024]: _type = "Task" [ 1559.667984] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.675549] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950337, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.746616] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950332, 'name': ReconfigVM_Task, 'duration_secs': 0.317051} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.747078] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Reconfigured VM instance instance-00000002 to attach disk [datastore1] b0b4d94c-cd5c-4452-baa6-9aeec46b43ad/b0b4d94c-cd5c-4452-baa6-9aeec46b43ad.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1559.747679] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84ae1cf0-e801-4983-b516-6f452696bb72 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.753300] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1559.753300] env[63024]: value = "task-1950338" [ 1559.753300] env[63024]: _type = "Task" [ 1559.753300] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.761910] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950338, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1559.803022] env[63024]: DEBUG nova.scheduler.client.report [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1559.965019] env[63024]: DEBUG oslo_concurrency.lockutils [req-b69a5d07-b466-4217-8316-d9bafbb04c4f req-7ca7a7aa-a994-44ef-88e2-4bcf2e274c46 service nova] Releasing lock "refresh_cache-ccd80e20-9fc2-415a-a428-fcf85994c7f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1559.965019] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950335, 'name': CreateVM_Task, 'duration_secs': 0.399494} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1559.965019] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1559.965019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1559.965019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1559.965264] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1559.965264] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d964749-e4e5-439e-9d33-f8592f2393f3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1559.971501] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1559.971501] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52db7cb1-f330-3fff-59e6-da6cda9c8aca" [ 1559.971501] env[63024]: _type = "Task" [ 1559.971501] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1559.982361] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52db7cb1-f330-3fff-59e6-da6cda9c8aca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.045415] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.166294] env[63024]: DEBUG oslo_vmware.api [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950336, 'name': PowerOnVM_Task, 'duration_secs': 0.4707} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.166598] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1560.167332] env[63024]: INFO nova.compute.manager [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Took 13.42 seconds to spawn the instance on the hypervisor. [ 1560.167639] env[63024]: DEBUG nova.compute.manager [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1560.168745] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781f1500-34f0-451e-adaa-5ec280b3b819 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.190145] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950337, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068072} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.190145] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1560.190145] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b66cfaa3-31c4-4346-9ae6-72345f10b8d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.218200] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] ccd80e20-9fc2-415a-a428-fcf85994c7f8/ccd80e20-9fc2-415a-a428-fcf85994c7f8.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1560.218611] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d65f2246-99db-423c-b742-322c23602a10 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.240577] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1560.240577] env[63024]: value = "task-1950339" [ 1560.240577] env[63024]: _type = "Task" [ 1560.240577] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.251239] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950339, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.263683] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950338, 'name': Rename_Task, 'duration_secs': 0.131678} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.264900] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1560.264900] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8aee76b4-a7b5-48ba-8c7b-84a1cb1839f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.271630] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1560.271630] env[63024]: value = "task-1950340" [ 1560.271630] env[63024]: _type = "Task" [ 1560.271630] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.281508] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950340, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.312602] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.422s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1560.312602] env[63024]: DEBUG nova.compute.manager [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1560.314356] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.269s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1560.316205] env[63024]: INFO nova.compute.claims [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1560.484985] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52db7cb1-f330-3fff-59e6-da6cda9c8aca, 'name': SearchDatastore_Task, 'duration_secs': 0.009872} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.485089] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.485366] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1560.485627] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1560.487073] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1560.487073] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1560.487073] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd3f8635-78b6-4ac6-b919-42a1bb8b0321 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.499484] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1560.499484] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1560.499484] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-568e32e3-bfee-46e3-9784-f1004289aeff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.508731] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1560.508731] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d5d25e-9b4a-8826-9770-4da4a49c381c" [ 1560.508731] env[63024]: _type = "Task" [ 1560.508731] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.522758] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d5d25e-9b4a-8826-9770-4da4a49c381c, 'name': SearchDatastore_Task, 'duration_secs': 0.00896} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.523920] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-190bf1a5-0a56-426e-97ae-2345311d795a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.535822] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1560.535822] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ae86de-7039-e737-0401-18c26f47ffd2" [ 1560.535822] env[63024]: _type = "Task" [ 1560.535822] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.553318] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ae86de-7039-e737-0401-18c26f47ffd2, 'name': SearchDatastore_Task, 'duration_secs': 0.009317} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.553714] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1560.553948] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b629b4f8-f79f-4361-b78c-8705a6888a9e/b629b4f8-f79f-4361-b78c-8705a6888a9e.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1560.554501] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4cbd6ce6-c755-42f1-b0d2-2ca2ae9c5f34 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.565330] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1560.565330] env[63024]: value = "task-1950341" [ 1560.565330] env[63024]: _type = "Task" [ 1560.565330] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.574015] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950341, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.705273] env[63024]: INFO nova.compute.manager [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Took 18.22 seconds to build instance. [ 1560.760252] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950339, 'name': ReconfigVM_Task, 'duration_secs': 0.343975} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.760919] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Reconfigured VM instance instance-00000003 to attach disk [datastore1] ccd80e20-9fc2-415a-a428-fcf85994c7f8/ccd80e20-9fc2-415a-a428-fcf85994c7f8.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1560.761912] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f1e50c04-cd41-48ff-bf43-88dc071b5287 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.769721] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1560.769721] env[63024]: value = "task-1950342" [ 1560.769721] env[63024]: _type = "Task" [ 1560.769721] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1560.784134] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950342, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1560.788444] env[63024]: DEBUG oslo_vmware.api [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950340, 'name': PowerOnVM_Task, 'duration_secs': 0.462764} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1560.788444] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1560.788548] env[63024]: INFO nova.compute.manager [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Took 11.86 seconds to spawn the instance on the hypervisor. 
[ 1560.789582] env[63024]: DEBUG nova.compute.manager [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1560.789743] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7e135e-a3b8-466d-8915-7fb466081409 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1560.823665] env[63024]: DEBUG nova.compute.utils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1560.827805] env[63024]: DEBUG nova.compute.manager [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1560.827805] env[63024]: DEBUG nova.network.neutron [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1560.937430] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Acquiring lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1560.937775] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.079419] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950341, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.081510] env[63024]: DEBUG nova.policy [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '868b92472ee24327a55c68efce691ba9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a5452991c0c433d987f52bad5c89d22', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1561.208991] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15d9daaf-7f21-4233-b710-69bb6285b30a tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "f90f35a2-f2ee-45e2-a9e4-afce50f29aa0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.737s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.282931] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950342, 'name': Rename_Task, 'duration_secs': 0.444352} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.283138] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1561.283375] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1ced31c-3c0b-45ba-b1ea-bf98388f7a05 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.292053] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1561.292053] env[63024]: value = "task-1950343" [ 1561.292053] env[63024]: _type = "Task" [ 1561.292053] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.303773] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950343, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.314966] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Acquiring lock "51532b8e-4adf-4cc7-b91e-885d7934a7e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1561.315391] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Lock "51532b8e-4adf-4cc7-b91e-885d7934a7e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1561.317073] env[63024]: INFO nova.compute.manager [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Took 17.81 seconds to build instance. [ 1561.332062] env[63024]: DEBUG nova.compute.manager [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1561.442613] env[63024]: DEBUG nova.compute.manager [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1561.517342] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c48bb3-1a9a-49b8-bed6-f84e341b3e9e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.528055] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e320c23-f3de-450e-9a85-5e24fcc22a96 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.564482] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a29617b-5039-4217-b2ea-1c1da7e17c1e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.579235] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd17d6f9-9bfa-46ed-a958-cb0c8158d050 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.583630] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950341, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.575713} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1561.583906] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b629b4f8-f79f-4361-b78c-8705a6888a9e/b629b4f8-f79f-4361-b78c-8705a6888a9e.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1561.584138] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1561.585220] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-254c9c8e-798e-4429-816f-a0370dbf6b79 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1561.596690] env[63024]: DEBUG nova.compute.provider_tree [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1561.605908] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1561.605908] env[63024]: value = "task-1950344" [ 1561.605908] env[63024]: _type = "Task" [ 1561.605908] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1561.617947] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950344, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.809253] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950343, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1561.817889] env[63024]: DEBUG nova.compute.manager [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1561.824825] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f96c789b-cc54-4712-92bd-644caaa71218 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.325s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1561.985683] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.102121] env[63024]: DEBUG nova.scheduler.client.report [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1562.125531] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950344, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069414} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.125531] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1562.127421] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224c7470-1a65-4629-a357-a8c960cfca01 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.185144] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] b629b4f8-f79f-4361-b78c-8705a6888a9e/b629b4f8-f79f-4361-b78c-8705a6888a9e.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1562.188282] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c905e2bc-3cd2-439d-8881-9e6448e1cf4e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.221040] env[63024]: DEBUG nova.compute.manager [req-703db284-6534-417b-be0a-5bbbfac4b3f0 req-f07b815c-605b-44f3-a03f-13b8e33cda2d service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Received event network-changed-fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1562.221506] env[63024]: DEBUG nova.compute.manager [req-703db284-6534-417b-be0a-5bbbfac4b3f0 req-f07b815c-605b-44f3-a03f-13b8e33cda2d service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Refreshing instance network info cache due to event network-changed-fe1aa30b-c99e-4641-9d91-c99d20670de0. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1562.221927] env[63024]: DEBUG oslo_concurrency.lockutils [req-703db284-6534-417b-be0a-5bbbfac4b3f0 req-f07b815c-605b-44f3-a03f-13b8e33cda2d service nova] Acquiring lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1562.222085] env[63024]: DEBUG oslo_concurrency.lockutils [req-703db284-6534-417b-be0a-5bbbfac4b3f0 req-f07b815c-605b-44f3-a03f-13b8e33cda2d service nova] Acquired lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1562.222253] env[63024]: DEBUG nova.network.neutron [req-703db284-6534-417b-be0a-5bbbfac4b3f0 req-f07b815c-605b-44f3-a03f-13b8e33cda2d service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Refreshing network info cache for port fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1562.232587] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1562.232587] env[63024]: value = "task-1950345" [ 1562.232587] env[63024]: _type = "Task" [ 1562.232587] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.244860] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950345, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.314116] env[63024]: DEBUG oslo_vmware.api [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950343, 'name': PowerOnVM_Task, 'duration_secs': 0.964386} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.314116] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1562.314116] env[63024]: INFO nova.compute.manager [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Took 11.17 seconds to spawn the instance on the hypervisor. 
[ 1562.314116] env[63024]: DEBUG nova.compute.manager [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1562.314904] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26d578c7-d330-4f38-b769-d11abea63f92 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.352449] env[63024]: DEBUG nova.compute.manager [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1562.411297] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1562.425651] env[63024]: DEBUG nova.virt.hardware [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1562.427903] env[63024]: DEBUG nova.virt.hardware [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1562.427903] env[63024]: DEBUG nova.virt.hardware [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1562.428221] env[63024]: DEBUG nova.virt.hardware [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1562.428433] env[63024]: DEBUG nova.virt.hardware [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 
tempest-MigrationsAdminTest-2055590921-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1562.428677] env[63024]: DEBUG nova.virt.hardware [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1562.429101] env[63024]: DEBUG nova.virt.hardware [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1562.430023] env[63024]: DEBUG nova.virt.hardware [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1562.430023] env[63024]: DEBUG nova.virt.hardware [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1562.430023] env[63024]: DEBUG nova.virt.hardware [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1562.430308] env[63024]: DEBUG nova.virt.hardware [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1562.431508] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3286eddd-ee37-486c-acb0-f07494165aeb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.445373] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35faf595-c064-40e7-b540-b14d8f973254 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.620283] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.305s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1562.620283] env[63024]: DEBUG nova.compute.manager [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1562.627424] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.641s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1562.630061] env[63024]: INFO nova.compute.claims [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1562.745643] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950345, 'name': ReconfigVM_Task, 'duration_secs': 0.43587} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1562.746523] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Reconfigured VM instance instance-00000004 to attach disk [datastore1] b629b4f8-f79f-4361-b78c-8705a6888a9e/b629b4f8-f79f-4361-b78c-8705a6888a9e.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1562.747987] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e918ebff-3fea-4ed0-b157-77514527b119 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1562.758048] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1562.758048] env[63024]: value = "task-1950346" [ 1562.758048] env[63024]: _type = "Task" [ 1562.758048] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1562.767890] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950346, 'name': Rename_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1562.829602] env[63024]: DEBUG nova.network.neutron [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Successfully created port: 90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1562.847066] env[63024]: INFO nova.compute.manager [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Took 18.43 seconds to build instance. [ 1563.134885] env[63024]: DEBUG nova.compute.utils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1563.140290] env[63024]: DEBUG nova.compute.manager [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1563.140501] env[63024]: DEBUG nova.network.neutron [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1563.267762] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950346, 'name': Rename_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.353202] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d637b5c2-ee33-4a01-b61c-9461ce18d668 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "ccd80e20-9fc2-415a-a428-fcf85994c7f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.945s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1563.638941] env[63024]: DEBUG nova.policy [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba28d277d86b4aba93743edd84852b71', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fd1cc46013d64c54a68b24c282ffe48b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1563.643268] env[63024]: DEBUG nova.compute.manager [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1563.768655] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950346, 'name': Rename_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1563.803440] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af630e4-e540-4035-acce-7695db1b2cb3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.818023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58ce9a1f-0ca1-4835-aa39-5b528b8e9bd2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.870693] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a73d89-77cd-4a1f-8ba4-f8df0b798a27 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.880196] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6ef0bdd-d218-4633-9ba2-b3c6718c5a40 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1563.896594] env[63024]: DEBUG nova.compute.provider_tree [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1564.278256] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950346, 'name': Rename_Task, 'duration_secs': 1.218984} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1564.279772] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1564.282379] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d909eac-9ef3-461a-9c77-26561025c618 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.285270] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "2bfcd5e1-b1d9-4829-bea5-d8c460ceec16" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.285547] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "2bfcd5e1-b1d9-4829-bea5-d8c460ceec16" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.294108] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1564.294108] env[63024]: value = "task-1950347" [ 1564.294108] env[63024]: _type = "Task" [ 1564.294108] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1564.305761] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950347, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.401132] env[63024]: DEBUG nova.scheduler.client.report [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1564.662604] env[63024]: DEBUG nova.compute.manager [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1564.709501] env[63024]: DEBUG nova.virt.hardware [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1564.709833] env[63024]: DEBUG nova.virt.hardware [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1564.710038] env[63024]: DEBUG nova.virt.hardware [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1564.710578] env[63024]: DEBUG nova.virt.hardware [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1564.711221] env[63024]: DEBUG nova.virt.hardware [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1564.711454] env[63024]: DEBUG nova.virt.hardware [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1564.711720] env[63024]: DEBUG nova.virt.hardware [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1564.711924] env[63024]: DEBUG nova.virt.hardware [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1564.712147] env[63024]: DEBUG nova.virt.hardware [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1564.716038] env[63024]: DEBUG nova.virt.hardware [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1564.716038] env[63024]: DEBUG nova.virt.hardware [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1564.716038] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2deafa68-9b11-412c-9261-f7ee609388e8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.718698] env[63024]: DEBUG nova.network.neutron [req-703db284-6534-417b-be0a-5bbbfac4b3f0 req-f07b815c-605b-44f3-a03f-13b8e33cda2d service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updated VIF entry in instance network info cache for port fe1aa30b-c99e-4641-9d91-c99d20670de0. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1564.719200] env[63024]: DEBUG nova.network.neutron [req-703db284-6534-417b-be0a-5bbbfac4b3f0 req-f07b815c-605b-44f3-a03f-13b8e33cda2d service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updating instance_info_cache with network_info: [{"id": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "address": "fa:16:3e:ec:ef:e1", "network": {"id": "dab57617-8c96-4c9c-a117-05fd2262c951", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1124018667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751ed00ef16a4cca832e3c78731c9379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe1aa30b-c9", "ovs_interfaceid": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1564.730800] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52a83935-3327-45b0-b98a-de4ca8d64330 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.795661] env[63024]: DEBUG nova.compute.manager [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1564.804967] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950347, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1564.908165] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.281s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1564.908716] env[63024]: DEBUG nova.compute.manager [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1564.911518] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.501s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.917191] env[63024]: INFO nova.compute.claims [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1564.924785] env[63024]: DEBUG oslo_concurrency.lockutils [None req-46720010-db2f-457a-9c8c-5b017ebb773c tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1564.925050] env[63024]: DEBUG oslo_concurrency.lockutils [None req-46720010-db2f-457a-9c8c-5b017ebb773c tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1564.925231] env[63024]: DEBUG nova.compute.manager [None req-46720010-db2f-457a-9c8c-5b017ebb773c tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1564.930736] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee7cf48-2d40-4a57-bdaf-32532bcc87b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1564.941487] env[63024]: DEBUG nova.compute.manager [None req-46720010-db2f-457a-9c8c-5b017ebb773c tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63024) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1564.946290] env[63024]: DEBUG nova.objects.instance [None req-46720010-db2f-457a-9c8c-5b017ebb773c tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lazy-loading 'flavor' on Instance uuid b0b4d94c-cd5c-4452-baa6-9aeec46b43ad {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1565.222709] env[63024]: DEBUG oslo_concurrency.lockutils [req-703db284-6534-417b-be0a-5bbbfac4b3f0 req-f07b815c-605b-44f3-a03f-13b8e33cda2d service nova] Releasing lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1565.321398] env[63024]: DEBUG oslo_vmware.api [None req-4b31faa8-25e0-4955-86f0-16c7843004ed 
tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950347, 'name': PowerOnVM_Task, 'duration_secs': 0.830724} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1565.321710] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1565.321917] env[63024]: INFO nova.compute.manager [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Took 11.97 seconds to spawn the instance on the hypervisor. [ 1565.322137] env[63024]: DEBUG nova.compute.manager [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1565.323171] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1ba7d3b-e67e-4ba1-b843-67cfd4af6f88 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.428669] env[63024]: DEBUG nova.compute.utils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1565.438446] env[63024]: DEBUG nova.compute.manager [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1565.438631] env[63024]: DEBUG nova.network.neutron [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1565.542998] env[63024]: DEBUG nova.policy [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5c26fddd2d7f4893acc552b0c955e613', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '7e2bf9d113204b598844e72d5d49f155', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1565.613671] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.695289] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Acquiring lock "e03b8577-9298-4e88-98ea-6258e97db28d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1565.695289] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Lock "e03b8577-9298-4e88-98ea-6258e97db28d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1565.851574] env[63024]: INFO nova.compute.manager [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Took 20.42 seconds to build instance. 
[ 1565.920496] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1565.939177] env[63024]: DEBUG nova.compute.manager [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1565.963419] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-46720010-db2f-457a-9c8c-5b017ebb773c tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1565.963562] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-668d4ba6-ad0b-47c5-b3d6-0aae6ece29f3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1565.973561] env[63024]: DEBUG oslo_vmware.api [None req-46720010-db2f-457a-9c8c-5b017ebb773c tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1565.973561] env[63024]: value = "task-1950348" [ 1565.973561] env[63024]: _type = "Task" [ 1565.973561] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1565.977020] env[63024]: DEBUG nova.scheduler.client.report [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1565.985979] env[63024]: DEBUG oslo_vmware.api [None req-46720010-db2f-457a-9c8c-5b017ebb773c tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950348, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1565.998940] env[63024]: DEBUG nova.scheduler.client.report [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1565.999477] env[63024]: DEBUG nova.compute.provider_tree [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1566.020435] env[63024]: DEBUG nova.network.neutron [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Successfully created port: 7744ae27-9eae-4bcd-b5d8-425150caba4f {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1566.025919] env[63024]: DEBUG nova.scheduler.client.report [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1566.051258] env[63024]: DEBUG nova.scheduler.client.report [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1566.321167] env[63024]: DEBUG nova.compute.manager [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1566.354240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b31faa8-25e0-4955-86f0-16c7843004ed tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "b629b4f8-f79f-4361-b78c-8705a6888a9e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.927s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1566.411713] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.416654] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e28ddc3e-c115-40a1-bb51-0f80624546f2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.426469] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e5dab6f-c91c-4b73-bf01-75a954d8eea7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.471927] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b5e732-bec1-47bf-b386-92ff15d95fbf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.483447] env[63024]: DEBUG oslo_vmware.api [None req-46720010-db2f-457a-9c8c-5b017ebb773c tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950348, 'name': PowerOffVM_Task, 'duration_secs': 0.291018} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1566.486145] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-46720010-db2f-457a-9c8c-5b017ebb773c tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1566.486642] env[63024]: DEBUG nova.compute.manager [None req-46720010-db2f-457a-9c8c-5b017ebb773c tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1566.487391] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9303fa8-44c3-41d7-9256-290547e70b46 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.491496] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cffa33e2-eb30-4569-9d1d-f90899bf23b2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1566.516292] env[63024]: DEBUG nova.compute.provider_tree [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1566.746641] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1566.928950] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1566.971224] env[63024]: DEBUG nova.compute.manager [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1567.004411] env[63024]: DEBUG nova.virt.hardware [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1567.006756] env[63024]: DEBUG nova.virt.hardware [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1567.006756] env[63024]: DEBUG nova.virt.hardware [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1567.006756] env[63024]: DEBUG nova.virt.hardware [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1567.006756] env[63024]: DEBUG nova.virt.hardware [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1567.006756] env[63024]: DEBUG nova.virt.hardware [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1567.007016] env[63024]: DEBUG nova.virt.hardware [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1567.007016] env[63024]: DEBUG nova.virt.hardware [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1567.007016] env[63024]: DEBUG nova.virt.hardware [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1567.007016] env[63024]: DEBUG nova.virt.hardware [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1567.007016] env[63024]: DEBUG nova.virt.hardware [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1567.008988] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ff8ceb-1d5e-406d-a88e-0086da885d2d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.015822] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0002132-e1d2-419e-9a04-6f3977d5b159 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.021907] env[63024]: DEBUG nova.scheduler.client.report [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1567.025761] env[63024]: DEBUG oslo_concurrency.lockutils [None req-46720010-db2f-457a-9c8c-5b017ebb773c tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.101s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.057902] env[63024]: DEBUG nova.network.neutron [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Successfully created port: 421d2adb-43a2-41f5-b64d-29989f6a0fa5 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1567.106915] env[63024]: DEBUG oslo_concurrency.lockutils [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Acquiring lock 
"f90f35a2-f2ee-45e2-a9e4-afce50f29aa0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.107366] env[63024]: DEBUG oslo_concurrency.lockutils [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Lock "f90f35a2-f2ee-45e2-a9e4-afce50f29aa0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.107504] env[63024]: DEBUG oslo_concurrency.lockutils [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Acquiring lock "f90f35a2-f2ee-45e2-a9e4-afce50f29aa0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1567.107564] env[63024]: DEBUG oslo_concurrency.lockutils [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Lock "f90f35a2-f2ee-45e2-a9e4-afce50f29aa0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.110930] env[63024]: DEBUG oslo_concurrency.lockutils [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Lock "f90f35a2-f2ee-45e2-a9e4-afce50f29aa0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.111278] env[63024]: INFO nova.compute.manager [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Terminating instance [ 1567.417061] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.417402] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1567.532964] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1567.532964] env[63024]: DEBUG nova.compute.manager [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 
tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1567.537937] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.922s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1567.537937] env[63024]: INFO nova.compute.claims [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1567.616852] env[63024]: DEBUG nova.compute.manager [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1567.617073] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1567.617998] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d622e66c-f53e-4fa2-9e4f-d0418fce4c57 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.625928] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1567.629022] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d7852e44-e605-45ef-b3db-2fd0b410e25e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1567.633661] env[63024]: DEBUG oslo_vmware.api [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Waiting for the task: (returnval){ [ 1567.633661] env[63024]: value = "task-1950349" [ 1567.633661] env[63024]: _type = "Task" [ 1567.633661] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1567.642221] env[63024]: DEBUG oslo_vmware.api [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Task: {'id': task-1950349, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1567.922691] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.027245] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Acquiring lock "7146277f-2621-4e8f-a14c-49bf4dd052db" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.027521] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Lock "7146277f-2621-4e8f-a14c-49bf4dd052db" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.048253] env[63024]: DEBUG nova.compute.utils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1568.055961] env[63024]: DEBUG nova.compute.manager [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Not allocating networking since 'none' was specified. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1568.150491] env[63024]: DEBUG oslo_vmware.api [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Task: {'id': task-1950349, 'name': PowerOffVM_Task, 'duration_secs': 0.184636} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1568.150601] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1568.150915] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1568.151436] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe6b1d68-3c5e-466d-8fac-b1c25b57985d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.532890] env[63024]: DEBUG nova.compute.manager [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1568.560594] env[63024]: DEBUG nova.compute.manager [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1568.574019] env[63024]: INFO nova.compute.manager [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Rescuing [ 1568.574019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "refresh_cache-ccd80e20-9fc2-415a-a428-fcf85994c7f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1568.574019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired lock "refresh_cache-ccd80e20-9fc2-415a-a428-fcf85994c7f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1568.574019] env[63024]: DEBUG nova.network.neutron [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1568.626123] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Acquiring lock "de31255d-b82f-4f32-82b2-0a8368fe2510" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1568.626123] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Lock "de31255d-b82f-4f32-82b2-0a8368fe2510" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1568.847132] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319eb3a2-9f5f-4c19-8b41-7eee57d61c00 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.862227] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3bb9b07-5cdb-410b-8c78-c4755f13f25c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.895994] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87f4e9c-1600-4e0a-b76b-6fd05354e7bc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.906733] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6107346c-f6a7-4ac3-98b0-1e9155fec479 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1568.921493] env[63024]: DEBUG nova.compute.provider_tree [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1569.054890] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.057215] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1569.057423] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1569.057599] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 
tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Deleting the datastore file [datastore1] f90f35a2-f2ee-45e2-a9e4-afce50f29aa0 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1569.057842] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9cbbfcc-3961-4823-9608-ca0c0468f6f9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.069898] env[63024]: DEBUG oslo_vmware.api [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Waiting for the task: (returnval){ [ 1569.069898] env[63024]: value = "task-1950351" [ 1569.069898] env[63024]: _type = "Task" [ 1569.069898] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.082622] env[63024]: DEBUG oslo_vmware.api [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Task: {'id': task-1950351, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.129091] env[63024]: DEBUG nova.compute.manager [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1569.239181] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquiring lock "6e477ec2-9270-42b1-85bd-a315460d9cab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.239437] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lock "6e477ec2-9270-42b1-85bd-a315460d9cab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.325384] env[63024]: DEBUG nova.network.neutron [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Successfully updated port: 90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1569.423998] env[63024]: DEBUG nova.scheduler.client.report [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1569.572178] env[63024]: DEBUG nova.compute.manager [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1569.586137] env[63024]: DEBUG oslo_vmware.api [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Task: {'id': task-1950351, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140645} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1569.586398] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1569.586576] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1569.586748] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1569.586977] env[63024]: INFO nova.compute.manager [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Took 1.97 seconds to destroy the instance on the hypervisor. [ 1569.587251] env[63024]: DEBUG oslo.service.loopingcall [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1569.587440] env[63024]: DEBUG nova.compute.manager [-] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1569.587533] env[63024]: DEBUG nova.network.neutron [-] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1569.609544] env[63024]: DEBUG nova.virt.hardware [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1569.609777] env[63024]: DEBUG nova.virt.hardware [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1569.609928] env[63024]: DEBUG nova.virt.hardware [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1569.610132] env[63024]: DEBUG nova.virt.hardware [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1569.610281] env[63024]: DEBUG nova.virt.hardware [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1569.610422] env[63024]: DEBUG nova.virt.hardware [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1569.610623] env[63024]: DEBUG nova.virt.hardware [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1569.610774] env[63024]: DEBUG nova.virt.hardware [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1569.610931] env[63024]: DEBUG nova.virt.hardware [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1569.611736] env[63024]: DEBUG nova.virt.hardware [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1569.611956] env[63024]: DEBUG nova.virt.hardware [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1569.613319] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe62b52a-716f-48b0-9ad8-c02c643638dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.627983] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a254ebb-7a18-4591-9605-3ca0de4fc02e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.648656] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Instance VIF info [] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1569.656090] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Creating folder: Project (fa435be6fbd241a7958627be982c7757). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1569.658916] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28b63496-eba7-4722-be87-f97e361917c6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.664975] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1569.677309] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Created folder: Project (fa435be6fbd241a7958627be982c7757) in parent group-v401959. [ 1569.678624] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Creating folder: Instances. Parent ref: group-v401972. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1569.678624] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd23ec25-1493-450e-b8b0-f2cd7c37f3f3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.690602] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Created folder: Instances in parent group-v401972. [ 1569.690946] env[63024]: DEBUG oslo.service.loopingcall [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1569.691062] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1569.691271] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-475eb18e-835d-4865-95ac-14b41168983c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1569.710922] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1569.710922] env[63024]: value = "task-1950354" [ 1569.710922] env[63024]: _type = "Task" [ 1569.710922] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1569.720506] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950354, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1569.743035] env[63024]: DEBUG nova.compute.manager [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1569.832190] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1569.833194] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1569.833194] env[63024]: DEBUG nova.network.neutron [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1569.841666] env[63024]: DEBUG nova.network.neutron [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Updating instance_info_cache with network_info: [{"id": "223e2d99-de00-4474-8225-7fee75ac28d6", "address": "fa:16:3e:83:4e:39", "network": {"id": "f42f7ff4-b2ef-45fd-8230-5f16271d5808", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-607041553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5489a064ee1d44f0bd6c496f4775b9d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap223e2d99-de", "ovs_interfaceid": "223e2d99-de00-4474-8225-7fee75ac28d6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1569.936832] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.401s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} 
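The instance_info_cache update above carries the full network_info structure for the port: its id, MAC address, subnets with fixed IPs, and the OVS/NSX binding details. A small illustrative helper, assuming the same list-of-VIFs shape as the logged JSON (it is a sketch over that structure, not Nova's network model API), that pulls out the fixed addresses and tap device names:

```python
def summarize_network_info(network_info):
    """Extract (port_id, mac, fixed_ips, devname) tuples from a
    network_info list shaped like the one logged for instance
    ccd80e20-9fc2-415a-a428-fcf85994c7f8 above."""
    summary = []
    for vif in network_info:
        fixed_ips = [
            ip['address']
            for subnet in vif['network']['subnets']
            for ip in subnet['ips']
        ]
        summary.append((vif['id'], vif['address'], fixed_ips, vif.get('devname')))
    return summary

# Example with the logged port, values abbreviated for brevity:
vifs = [{
    'id': '223e2d99-de00-4474-8225-7fee75ac28d6',
    'address': 'fa:16:3e:83:4e:39',
    'devname': 'tap223e2d99-de',
    'network': {'subnets': [{'ips': [{'address': '192.168.128.6'}]}]},
}]
print(summarize_network_info(vifs))
```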
[ 1569.937867] env[63024]: DEBUG nova.compute.manager [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1569.942151] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.197s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1569.943580] env[63024]: INFO nova.compute.claims [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1570.222641] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950354, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.279868] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.346819] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Releasing lock "refresh_cache-ccd80e20-9fc2-415a-a428-fcf85994c7f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1570.380347] env[63024]: DEBUG nova.network.neutron [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1570.396811] env[63024]: DEBUG nova.network.neutron [-] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.452955] env[63024]: DEBUG nova.compute.utils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1570.454329] env[63024]: DEBUG nova.compute.manager [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1570.454503] env[63024]: DEBUG nova.network.neutron [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1570.509272] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Acquiring lock "04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.509503] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Lock "04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.552680] env[63024]: DEBUG nova.network.neutron [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance_info_cache with network_info: [{"id": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "address": "fa:16:3e:64:8e:6a", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90fdf5d2-f2", "ovs_interfaceid": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1570.628306] env[63024]: DEBUG nova.policy [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62964d784b2f4b3fbd8b869ab299eb7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c720cdab04804a8390b825059692c3f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 
'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1570.698321] env[63024]: DEBUG nova.network.neutron [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Successfully updated port: 7744ae27-9eae-4bcd-b5d8-425150caba4f {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1570.727148] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950354, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1570.787153] env[63024]: DEBUG nova.network.neutron [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Successfully updated port: 421d2adb-43a2-41f5-b64d-29989f6a0fa5 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1570.899327] env[63024]: INFO nova.compute.manager [-] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Took 1.31 seconds to deallocate network for instance. [ 1570.917012] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquiring lock "726d9639-1ab4-46a9-975e-5580c8344a37" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1570.917585] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lock "726d9639-1ab4-46a9-975e-5580c8344a37" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1570.961509] env[63024]: DEBUG nova.compute.manager [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1571.020102] env[63024]: DEBUG nova.compute.manager [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1571.059307] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.060258] env[63024]: DEBUG nova.compute.manager [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Instance network_info: |[{"id": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "address": "fa:16:3e:64:8e:6a", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90fdf5d2-f2", "ovs_interfaceid": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1571.061616] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:8e:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1571.070070] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Creating folder: Project (8a5452991c0c433d987f52bad5c89d22). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1571.075768] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b9347d13-2657-4a6d-9cdf-095d32ef4688 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.087478] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Created folder: Project (8a5452991c0c433d987f52bad5c89d22) in parent group-v401959. 
[ 1571.087711] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Creating folder: Instances. Parent ref: group-v401975. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1571.087962] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af62dec8-6652-439f-a8e3-b1571961b419 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.100231] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Created folder: Instances in parent group-v401975. [ 1571.100507] env[63024]: DEBUG oslo.service.loopingcall [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1571.101372] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1571.101372] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3b3c8c9d-a17e-4d04-b13a-db32f488bb9c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.127632] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1571.127632] env[63024]: value = "task-1950357" [ 1571.127632] env[63024]: _type = "Task" [ 1571.127632] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.142107] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950357, 'name': CreateVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.203494] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Acquiring lock "refresh_cache-22ef5bae-f7bc-43c7-9d77-1b4547e83b24" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.203494] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Acquired lock "refresh_cache-22ef5bae-f7bc-43c7-9d77-1b4547e83b24" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.203494] env[63024]: DEBUG nova.network.neutron [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1571.230353] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950354, 'name': CreateVM_Task, 'duration_secs': 1.388604} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.230583] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1571.231200] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.231373] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.231864] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1571.231977] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f41033b-0ff9-4f32-8234-97d3717de542 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.241063] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Waiting for the task: (returnval){ [ 1571.241063] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5284e69e-05a3-77b9-2dc6-5efb1a4aef4d" [ 1571.241063] env[63024]: _type = "Task" [ 1571.241063] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.250520] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5284e69e-05a3-77b9-2dc6-5efb1a4aef4d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.261669] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c736be-7277-48f0-bb12-4764d5f9c892 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.273329] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c01402-542a-4910-9a1e-f1a600ca0738 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.305269] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Acquiring lock "refresh_cache-61fdfa06-cb40-44a3-8abc-428b26bd40f5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.305269] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Acquired lock "refresh_cache-61fdfa06-cb40-44a3-8abc-428b26bd40f5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.305269] env[63024]: DEBUG nova.network.neutron [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1571.309569] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fe6184-520f-42a5-94f4-f82bb7d80bab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.315300] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad00206b-27dd-4756-a2fb-20f6ee8e91dd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.330279] env[63024]: DEBUG nova.compute.provider_tree [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1571.400336] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1571.400642] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8c24f4b7-78fa-451a-9d0d-58f6f04bae82 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.409159] env[63024]: DEBUG oslo_concurrency.lockutils [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 
tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.409516] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1571.409516] env[63024]: value = "task-1950358" [ 1571.409516] env[63024]: _type = "Task" [ 1571.409516] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.419978] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950358, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.478568] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Acquiring lock "17e1dfa2-b104-4aac-928e-6364da155c3d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.479185] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Lock "17e1dfa2-b104-4aac-928e-6364da155c3d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.530644] env[63024]: DEBUG nova.compute.manager [req-38ffe44a-7942-4220-8e40-dca8ee4b12a9 req-9cc57eca-bc73-47d2-a242-cc2e4dea080c service nova] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Received event network-vif-plugged-90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1571.530949] env[63024]: DEBUG oslo_concurrency.lockutils [req-38ffe44a-7942-4220-8e40-dca8ee4b12a9 req-9cc57eca-bc73-47d2-a242-cc2e4dea080c service nova] Acquiring lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.531197] env[63024]: DEBUG oslo_concurrency.lockutils [req-38ffe44a-7942-4220-8e40-dca8ee4b12a9 req-9cc57eca-bc73-47d2-a242-cc2e4dea080c service nova] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1571.531246] env[63024]: DEBUG oslo_concurrency.lockutils [req-38ffe44a-7942-4220-8e40-dca8ee4b12a9 req-9cc57eca-bc73-47d2-a242-cc2e4dea080c service nova] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1571.531396] env[63024]: DEBUG nova.compute.manager [req-38ffe44a-7942-4220-8e40-dca8ee4b12a9 req-9cc57eca-bc73-47d2-a242-cc2e4dea080c service nova] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] No waiting events found dispatching network-vif-plugged-90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1571.531567] env[63024]: WARNING nova.compute.manager [req-38ffe44a-7942-4220-8e40-dca8ee4b12a9 req-9cc57eca-bc73-47d2-a242-cc2e4dea080c service nova] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Received unexpected event network-vif-plugged-90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3 for instance with vm_state building and task_state spawning. [ 1571.532525] env[63024]: DEBUG nova.network.neutron [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Successfully created port: 468acd69-27ef-4644-8085-504eba6c7955 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1571.566572] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1571.639444] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950357, 'name': CreateVM_Task, 'duration_secs': 0.497647} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.639684] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1571.640467] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.755421] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5284e69e-05a3-77b9-2dc6-5efb1a4aef4d, 'name': SearchDatastore_Task, 'duration_secs': 0.013287} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.756034] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1571.756113] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1571.756362] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1571.756665] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.756758] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1571.757177] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1571.758752] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1571.758752] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a78b0c24-fdf5-4504-a79f-6e825834ed8b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.760041] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fcccf703-5022-4c27-88f8-63ef0e70bf46 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.766025] env[63024]: DEBUG 
oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1571.766025] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52aea56e-b63b-ef88-e9ac-6f43c7c78b48" [ 1571.766025] env[63024]: _type = "Task" [ 1571.766025] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.770504] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1571.770504] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1571.771609] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8c1fd12-2681-41f1-892d-1f6798677664 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.776103] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52aea56e-b63b-ef88-e9ac-6f43c7c78b48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.778888] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Waiting for the task: (returnval){ [ 1571.778888] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529c911d-2eb5-0b3c-7b92-4b939a38946b" [ 1571.778888] env[63024]: _type = "Task" [ 1571.778888] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1571.787936] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529c911d-2eb5-0b3c-7b92-4b939a38946b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1571.808092] env[63024]: DEBUG nova.compute.manager [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1571.808842] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c62d6d-01be-485d-9806-d37027e62080 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.836200] env[63024]: DEBUG nova.scheduler.client.report [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1571.922741] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950358, 'name': PowerOffVM_Task, 'duration_secs': 0.22536} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1571.923038] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1571.923899] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd1a729-35ac-45b6-85ca-8fd0e34b9ece {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.950184] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00868790-3161-4b4f-abc4-c0c60acb799c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.965816] env[63024]: DEBUG nova.network.neutron [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1571.973000] env[63024]: DEBUG nova.compute.manager [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1571.986023] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1571.986298] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c81595cf-a0d2-4b2a-85e3-ef1fae18c3a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1571.991849] env[63024]: DEBUG nova.network.neutron [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1571.997981] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1571.997981] env[63024]: value = "task-1950359" [ 1571.997981] env[63024]: _type = "Task" [ 1571.997981] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.005773] env[63024]: DEBUG nova.virt.hardware [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1572.006086] env[63024]: DEBUG nova.virt.hardware [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1572.006281] env[63024]: DEBUG nova.virt.hardware [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1572.006471] env[63024]: DEBUG nova.virt.hardware [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Flavor pref 0:0:0 {{(pid=63024) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1572.006681] env[63024]: DEBUG nova.virt.hardware [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1572.006845] env[63024]: DEBUG nova.virt.hardware [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1572.007138] env[63024]: DEBUG nova.virt.hardware [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1572.007331] env[63024]: DEBUG nova.virt.hardware [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1572.007544] env[63024]: DEBUG nova.virt.hardware [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1572.007719] env[63024]: DEBUG nova.virt.hardware [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1572.007909] env[63024]: DEBUG nova.virt.hardware [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1572.008803] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e455c2b2-3457-484f-ad7e-2435bf0d4f7d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.017631] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1572.017631] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) 
_fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1572.017631] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.024402] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe993f5-637d-4088-9674-13fe6fe429a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.276849] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52aea56e-b63b-ef88-e9ac-6f43c7c78b48, 'name': SearchDatastore_Task, 'duration_secs': 0.00981} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.276849] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.276849] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1572.277253] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1572.289111] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529c911d-2eb5-0b3c-7b92-4b939a38946b, 'name': SearchDatastore_Task, 'duration_secs': 0.012047} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.290337] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c75bd810-ec40-473d-b6c7-c706416930fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.295317] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Waiting for the task: (returnval){ [ 1572.295317] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5220f904-f64e-6426-15dd-93b695a1dbbb" [ 1572.295317] env[63024]: _type = "Task" [ 1572.295317] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.305247] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5220f904-f64e-6426-15dd-93b695a1dbbb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.321549] env[63024]: INFO nova.compute.manager [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] instance snapshotting [ 1572.321813] env[63024]: WARNING nova.compute.manager [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 1572.324629] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fb45a1a-c1ca-4164-9a84-43455b384d0a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.347231] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.347771] env[63024]: DEBUG nova.compute.manager [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1572.354525] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 4.432s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.354709] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.354892] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1572.355191] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.300s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.358052] env[63024]: INFO nova.compute.claims [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1572.364064] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26fc042-4eb1-4b25-81b2-3e2176f67fb7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.365865] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e1f9cb5-455e-480b-adda-82ff118f13a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.384060] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac60719-ef7e-4538-994a-3209d0914d81 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.401435] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6a7206-e28c-4a57-a9f2-3181e5f1396e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.409865] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede60146-e3ca-4929-86b5-63f72ffa097e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.446812] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181306MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1572.447065] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.537782] env[63024]: DEBUG nova.compute.manager [req-83db2770-b66c-46a8-bb31-0596249a306e req-edf9645b-ed36-4ab2-9c24-c4d199337b97 service nova] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Received event network-vif-plugged-7744ae27-9eae-4bcd-b5d8-425150caba4f {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1572.538019] env[63024]: DEBUG oslo_concurrency.lockutils [req-83db2770-b66c-46a8-bb31-0596249a306e req-edf9645b-ed36-4ab2-9c24-c4d199337b97 service nova] Acquiring lock "22ef5bae-f7bc-43c7-9d77-1b4547e83b24-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1572.538226] env[63024]: DEBUG oslo_concurrency.lockutils [req-83db2770-b66c-46a8-bb31-0596249a306e req-edf9645b-ed36-4ab2-9c24-c4d199337b97 service nova] Lock "22ef5bae-f7bc-43c7-9d77-1b4547e83b24-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1572.538385] env[63024]: DEBUG oslo_concurrency.lockutils [req-83db2770-b66c-46a8-bb31-0596249a306e req-edf9645b-ed36-4ab2-9c24-c4d199337b97 service nova] Lock "22ef5bae-f7bc-43c7-9d77-1b4547e83b24-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1572.538540] env[63024]: DEBUG nova.compute.manager [req-83db2770-b66c-46a8-bb31-0596249a306e req-edf9645b-ed36-4ab2-9c24-c4d199337b97 service nova] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] No waiting events found dispatching network-vif-plugged-7744ae27-9eae-4bcd-b5d8-425150caba4f {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1572.538691] env[63024]: WARNING nova.compute.manager [req-83db2770-b66c-46a8-bb31-0596249a306e req-edf9645b-ed36-4ab2-9c24-c4d199337b97 service nova] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Received unexpected event network-vif-plugged-7744ae27-9eae-4bcd-b5d8-425150caba4f for instance with vm_state building and task_state spawning. 
[ 1572.618974] env[63024]: DEBUG nova.network.neutron [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Updating instance_info_cache with network_info: [{"id": "7744ae27-9eae-4bcd-b5d8-425150caba4f", "address": "fa:16:3e:0c:e4:50", "network": {"id": "f48a8d6b-d27a-400e-952d-435733c93214", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1457082334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd1cc46013d64c54a68b24c282ffe48b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7744ae27-9e", "ovs_interfaceid": "7744ae27-9eae-4bcd-b5d8-425150caba4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.680826] env[63024]: DEBUG nova.network.neutron [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Updating instance_info_cache with network_info: [{"id": "421d2adb-43a2-41f5-b64d-29989f6a0fa5", "address": "fa:16:3e:9e:9f:76", "network": {"id": "ab69012e-45b8-478c-9986-252d2f87d7d9", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1505663829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e2bf9d113204b598844e72d5d49f155", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bed837fa-6b6a-4192-a229-a99426a46065", "external-id": "nsx-vlan-transportzone-954", "segmentation_id": 954, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap421d2adb-43", "ovs_interfaceid": "421d2adb-43a2-41f5-b64d-29989f6a0fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1572.807237] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5220f904-f64e-6426-15dd-93b695a1dbbb, 'name': SearchDatastore_Task, 'duration_secs': 0.010413} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.807561] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1572.807756] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 51532b8e-4adf-4cc7-b91e-885d7934a7e8/51532b8e-4adf-4cc7-b91e-885d7934a7e8.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1572.808094] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1572.808286] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1572.808521] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3d4e5e9b-8e4f-4d0b-b2f2-1765aa8a96a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.810879] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7622cb7-c35b-431a-a600-f401c8ef339b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.818671] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Waiting for the task: (returnval){ [ 1572.818671] env[63024]: value = "task-1950360" [ 1572.818671] env[63024]: _type = "Task" [ 1572.818671] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.820053] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1572.820243] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1572.824177] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbb9e182-b5c1-4e66-b53c-0a4b00621d72 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.830078] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1572.830078] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5242d379-29a4-ba52-b961-f73991584e27" [ 1572.830078] env[63024]: _type = "Task" [ 1572.830078] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.833766] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950360, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.848535] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5242d379-29a4-ba52-b961-f73991584e27, 'name': SearchDatastore_Task, 'duration_secs': 0.010426} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1572.848535] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-553acb2d-de7b-468d-8df2-0f5122761cb1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.853031] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1572.853031] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ca6c73-d00a-fa6b-3241-e464eba70ad9" [ 1572.853031] env[63024]: _type = "Task" [ 1572.853031] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.860088] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ca6c73-d00a-fa6b-3241-e464eba70ad9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.866949] env[63024]: DEBUG nova.compute.utils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1572.871692] env[63024]: DEBUG nova.compute.manager [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1572.871692] env[63024]: DEBUG nova.network.neutron [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1572.892772] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1572.892772] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-414b1475-31b5-4f80-9e57-7038296158e9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1572.901455] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1572.901455] env[63024]: value = "task-1950361" [ 1572.901455] env[63024]: _type = "Task" [ 1572.901455] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1572.922444] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950361, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1572.944338] env[63024]: DEBUG nova.policy [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46c4f7e1614342b1884f12f06255bec7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9860b12ec09944ddacb54f69a18d4c4c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1573.122481] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Releasing lock "refresh_cache-22ef5bae-f7bc-43c7-9d77-1b4547e83b24" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.122946] env[63024]: DEBUG nova.compute.manager [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Instance network_info: |[{"id": "7744ae27-9eae-4bcd-b5d8-425150caba4f", "address": "fa:16:3e:0c:e4:50", "network": {"id": "f48a8d6b-d27a-400e-952d-435733c93214", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1457082334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd1cc46013d64c54a68b24c282ffe48b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7744ae27-9e", "ovs_interfaceid": "7744ae27-9eae-4bcd-b5d8-425150caba4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1573.124847] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:e4:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2be3fdb5-359e-43bd-8c20-2ff00e81db55', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7744ae27-9eae-4bcd-b5d8-425150caba4f', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1573.136893] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Creating folder: Project (fd1cc46013d64c54a68b24c282ffe48b). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1573.138976] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79859f80-3a25-4a1c-921c-8f089533484d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.149309] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Created folder: Project (fd1cc46013d64c54a68b24c282ffe48b) in parent group-v401959. [ 1573.149309] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Creating folder: Instances. Parent ref: group-v401978. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1573.149309] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ad39265c-e55b-40df-9de1-480fa4681b5c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.159017] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Created folder: Instances in parent group-v401978. [ 1573.159865] env[63024]: DEBUG oslo.service.loopingcall [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1573.160475] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1573.160751] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fb3cad72-0adc-430d-8d9c-ec9770bdeb17 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.183437] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Releasing lock "refresh_cache-61fdfa06-cb40-44a3-8abc-428b26bd40f5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.183841] env[63024]: DEBUG nova.compute.manager [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Instance network_info: |[{"id": "421d2adb-43a2-41f5-b64d-29989f6a0fa5", "address": "fa:16:3e:9e:9f:76", "network": {"id": "ab69012e-45b8-478c-9986-252d2f87d7d9", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1505663829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e2bf9d113204b598844e72d5d49f155", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bed837fa-6b6a-4192-a229-a99426a46065", "external-id": "nsx-vlan-transportzone-954", "segmentation_id": 954, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap421d2adb-43", "ovs_interfaceid": "421d2adb-43a2-41f5-b64d-29989f6a0fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1573.184351] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:9f:76', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bed837fa-6b6a-4192-a229-a99426a46065', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '421d2adb-43a2-41f5-b64d-29989f6a0fa5', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1573.193504] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Creating folder: Project (7e2bf9d113204b598844e72d5d49f155). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1573.195135] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a5e50587-f0fd-4bd7-b0b9-a17b3bff46f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.197501] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1573.197501] env[63024]: value = "task-1950364" [ 1573.197501] env[63024]: _type = "Task" [ 1573.197501] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.209741] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950364, 'name': CreateVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.211073] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Created folder: Project (7e2bf9d113204b598844e72d5d49f155) in parent group-v401959. [ 1573.211296] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Creating folder: Instances. Parent ref: group-v401980. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1573.211545] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed9f1002-7211-4131-9802-2189e0258d64 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.229177] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Created folder: Instances in parent group-v401980. [ 1573.229500] env[63024]: DEBUG oslo.service.loopingcall [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1573.229745] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1573.229992] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-658f8ed8-31e7-4cf3-84a3-df7f2556234e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.255400] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1573.255400] env[63024]: value = "task-1950367" [ 1573.255400] env[63024]: _type = "Task" [ 1573.255400] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.267201] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950367, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.332614] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950360, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.362068] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ca6c73-d00a-fa6b-3241-e464eba70ad9, 'name': SearchDatastore_Task, 'duration_secs': 0.009086} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.362386] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1573.362645] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] ccd80e20-9fc2-415a-a428-fcf85994c7f8/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk. 
{{(pid=63024) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1573.363082] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1573.363217] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1573.363439] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a7fb397e-8944-4cbd-aae0-53f938388640 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.365508] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3d37469-b4f6-4943-8752-0346be9e0c5b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.375269] env[63024]: DEBUG nova.compute.manager [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1573.380316] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1573.380316] env[63024]: value = "task-1950368" [ 1573.380316] env[63024]: _type = "Task" [ 1573.380316] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.381967] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1573.382184] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1573.386354] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69c27725-f045-4661-af60-8d4b10017a30 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.397250] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950368, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.398230] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1573.398230] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521589f6-beb0-c143-7753-163d23fec5aa" [ 1573.398230] env[63024]: _type = "Task" [ 1573.398230] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.409527] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521589f6-beb0-c143-7753-163d23fec5aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.418909] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950361, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.549208] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "e2138192-14e0-43d2-9d19-9820747d7217" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1573.549208] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "e2138192-14e0-43d2-9d19-9820747d7217" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1573.704569] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e414e315-dcb2-4c01-b435-4fd027371b7d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.712259] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950364, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.717713] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1de6e31-b0ec-48bd-8d6a-66bb44539ad8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.755448] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5097c6-47b7-4df9-802d-bc96220df35d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.767116] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950367, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.771665] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e4c48ab-0f10-404c-879a-40517f8e3509 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.785918] env[63024]: DEBUG nova.compute.provider_tree [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1573.835266] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950360, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644433} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.835970] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 51532b8e-4adf-4cc7-b91e-885d7934a7e8/51532b8e-4adf-4cc7-b91e-885d7934a7e8.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1573.835970] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1573.836729] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-46eced8f-f0e8-48dd-b948-c8540a317c07 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.848759] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Waiting for the task: (returnval){ [ 1573.848759] env[63024]: value = "task-1950369" [ 1573.848759] env[63024]: _type = "Task" [ 1573.848759] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.857886] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950369, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.899390] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950368, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.915248] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950361, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1573.926027] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521589f6-beb0-c143-7753-163d23fec5aa, 'name': SearchDatastore_Task, 'duration_secs': 0.038754} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1573.926027] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b4565ba-1bfb-4e8f-94de-0e0a3ba15c3a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1573.929602] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1573.929602] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bf2166-fa72-e7d1-5a2e-4862caddd6f4" [ 1573.929602] env[63024]: _type = "Task" [ 1573.929602] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1573.942479] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bf2166-fa72-e7d1-5a2e-4862caddd6f4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.211969] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950364, 'name': CreateVM_Task, 'duration_secs': 0.935636} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.212374] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1574.213175] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.213402] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.213766] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1574.214056] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-581b4b03-68b0-4e9b-be15-3c6155c0a096 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.219289] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1574.219289] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f5a27a-03b1-6b22-75ee-871ac9ac9cda" [ 1574.219289] env[63024]: _type = "Task" [ 1574.219289] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.228263] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "b765b8b3-a099-4e23-be30-d1178ecffc37" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1574.228485] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "b765b8b3-a099-4e23-be30-d1178ecffc37" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.236074] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f5a27a-03b1-6b22-75ee-871ac9ac9cda, 'name': SearchDatastore_Task, 'duration_secs': 0.008204} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.236403] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.236650] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1574.236878] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.269659] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950367, 'name': CreateVM_Task, 'duration_secs': 0.612335} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.269879] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1574.270665] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.270832] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.271175] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1574.271413] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43b555b0-fb19-4d35-86e9-0c9055ba82ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.276480] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Waiting for the task: (returnval){ [ 1574.276480] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520408d4-59b3-cdb2-0b11-bf3550d76f73" [ 1574.276480] env[63024]: _type = "Task" [ 1574.276480] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.286126] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520408d4-59b3-cdb2-0b11-bf3550d76f73, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.289082] env[63024]: DEBUG nova.scheduler.client.report [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1574.358652] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950369, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092125} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.358652] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1574.359485] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8b4a4f-708f-4614-b5ff-bc2f30936c89 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.381264] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Reconfiguring VM instance instance-00000008 to attach disk [datastore1] 51532b8e-4adf-4cc7-b91e-885d7934a7e8/51532b8e-4adf-4cc7-b91e-885d7934a7e8.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1574.382670] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68417824-8787-44af-8817-a71745caf4b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.398565] env[63024]: DEBUG nova.network.neutron [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Successfully created port: d962584b-9fa7-4c73-b446-b432b537aafd {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1574.399671] env[63024]: DEBUG nova.compute.manager [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1574.410303] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Waiting for the task: (returnval){ [ 1574.410303] env[63024]: value = "task-1950370" [ 1574.410303] env[63024]: _type = "Task" [ 1574.410303] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.414435] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950368, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.755513} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.420189] env[63024]: INFO nova.virt.vmwareapi.ds_util [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] ccd80e20-9fc2-415a-a428-fcf85994c7f8/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk. [ 1574.420336] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950361, 'name': CreateSnapshot_Task, 'duration_secs': 1.344179} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.422673] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca696be-30d8-4581-ba5e-f35782c91ee2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.425424] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1574.426782] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896f9b35-95b0-4366-99d4-406e20b7b81c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.432535] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950370, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.434993] env[63024]: DEBUG nova.virt.hardware [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1574.435230] env[63024]: DEBUG nova.virt.hardware [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1574.435388] env[63024]: DEBUG nova.virt.hardware [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1574.435562] env[63024]: DEBUG nova.virt.hardware [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1574.435702] env[63024]: DEBUG nova.virt.hardware [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1574.435845] env[63024]: DEBUG nova.virt.hardware [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1574.436059] env[63024]: DEBUG nova.virt.hardware [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1574.436219] env[63024]: DEBUG nova.virt.hardware [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 
tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1574.436381] env[63024]: DEBUG nova.virt.hardware [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1574.436539] env[63024]: DEBUG nova.virt.hardware [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1574.436789] env[63024]: DEBUG nova.virt.hardware [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1574.440929] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a51674c-2c13-4e44-a08d-812338ce1a5e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.467228] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] ccd80e20-9fc2-415a-a428-fcf85994c7f8/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1574.471771] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-222bc501-dcb9-49c1-8d60-30820cbc169a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.490638] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bf2166-fa72-e7d1-5a2e-4862caddd6f4, 'name': SearchDatastore_Task, 'duration_secs': 0.049018} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.491991] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.492325] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df/f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1574.492580] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1574.492764] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1574.492970] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8bc4112-de56-4e7c-b638-7fbda26ce96b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.496984] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-524d3640-0244-41ed-a570-a2de52225806 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.501286] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97e4abe4-0136-4c1f-aae3-82fc96a92487 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.505171] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1574.505171] env[63024]: value = "task-1950371" [ 1574.505171] env[63024]: _type = "Task" [ 1574.505171] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.506740] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1574.506740] env[63024]: value = "task-1950372" [ 1574.506740] env[63024]: _type = "Task" [ 1574.506740] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.524869] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1574.525279] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1574.526506] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74353e49-8a19-4029-978b-42a4e26e6899 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.532671] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950371, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.536181] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950372, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.537452] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1574.537452] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527a8e49-6c95-5357-476d-736a417e2cfe" [ 1574.537452] env[63024]: _type = "Task" [ 1574.537452] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.545742] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527a8e49-6c95-5357-476d-736a417e2cfe, 'name': SearchDatastore_Task, 'duration_secs': 0.007846} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.547147] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c640939-39f8-4d09-b047-c50f86fecd89 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.552730] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1574.552730] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5219606c-acae-fd04-24eb-2a72d78784be" [ 1574.552730] env[63024]: _type = "Task" [ 1574.552730] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.560650] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5219606c-acae-fd04-24eb-2a72d78784be, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.787711] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520408d4-59b3-cdb2-0b11-bf3550d76f73, 'name': SearchDatastore_Task, 'duration_secs': 0.011648} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1574.788067] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1574.788521] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1574.788624] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1574.793470] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.438s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1574.794042] env[63024]: DEBUG nova.compute.manager [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1574.798076] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.133s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1574.799440] env[63024]: INFO nova.compute.claims [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1574.926530] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950370, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1574.979169] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1574.979516] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e12ed9ab-8812-47ad-bab4-7fb0755d43de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1574.987814] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1574.987814] env[63024]: value = "task-1950373" [ 1574.987814] env[63024]: _type = "Task" [ 1574.987814] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1574.996201] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950373, 'name': CloneVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.018641] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950371, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.029261] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950372, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458068} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.030159] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df/f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1575.030765] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1575.033156] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0661b5ad-f91d-4c3c-8d8d-33b2602b23d1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.038397] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1575.038397] env[63024]: value = "task-1950374" [ 1575.038397] env[63024]: _type = "Task" [ 1575.038397] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.045274] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950374, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.062226] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5219606c-acae-fd04-24eb-2a72d78784be, 'name': SearchDatastore_Task, 'duration_secs': 0.010427} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.062491] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1575.063017] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 22ef5bae-f7bc-43c7-9d77-1b4547e83b24/22ef5bae-f7bc-43c7-9d77-1b4547e83b24.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1575.063017] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.063187] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1575.063400] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e9fdf1d-b0af-4e77-9cd7-9bd287e1d891 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.065436] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b5160bd2-04c4-4b96-a2c0-dd105d6c67bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.072418] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1575.072418] env[63024]: value = "task-1950375" [ 1575.072418] env[63024]: _type = "Task" [ 1575.072418] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.076630] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1575.076938] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1575.077914] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-413dae8f-709e-496a-a492-d661d47bca8f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.085050] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950375, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.088964] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Waiting for the task: (returnval){ [ 1575.088964] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5253ca9b-9f72-397d-f378-c948f8f03ac5" [ 1575.088964] env[63024]: _type = "Task" [ 1575.088964] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.096195] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5253ca9b-9f72-397d-f378-c948f8f03ac5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.300241] env[63024]: DEBUG nova.compute.utils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1575.306024] env[63024]: DEBUG nova.compute.manager [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1575.306024] env[63024]: DEBUG nova.network.neutron [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1575.426601] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950370, 'name': ReconfigVM_Task, 'duration_secs': 0.544085} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.426894] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Reconfigured VM instance instance-00000008 to attach disk [datastore1] 51532b8e-4adf-4cc7-b91e-885d7934a7e8/51532b8e-4adf-4cc7-b91e-885d7934a7e8.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1575.427550] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a96d34f2-d9ce-4efd-aaec-74cafec0195a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.435412] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Waiting for the task: (returnval){ [ 1575.435412] env[63024]: value = "task-1950376" [ 1575.435412] env[63024]: _type = "Task" [ 1575.435412] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.440210] env[63024]: DEBUG nova.network.neutron [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Successfully updated port: 468acd69-27ef-4644-8085-504eba6c7955 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1575.447457] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950376, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.500174] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950373, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.509166] env[63024]: DEBUG nova.policy [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a04cbe70535a4a5eaac09a1fc4f90692', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18ce2b5b54b54a71af0b71ba887dd2a4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1575.523840] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950371, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.553096] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950374, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068495} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.553352] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1575.554314] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ddda7b0-dc76-483e-a661-ff7481728a1e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.578651] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df/f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1575.579143] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2abf07e-f8ac-4f04-afe2-c051c2e11e5e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.611937] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950375, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.617082] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5253ca9b-9f72-397d-f378-c948f8f03ac5, 'name': SearchDatastore_Task, 'duration_secs': 0.017347} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1575.617477] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1575.617477] env[63024]: value = "task-1950377" [ 1575.617477] env[63024]: _type = "Task" [ 1575.617477] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.618351] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-278221db-3cb3-48ff-829b-1dcdfde5c57f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1575.641576] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Waiting for the task: (returnval){ [ 1575.641576] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52df16e9-559e-f8b9-6cb0-51b0cb98404d" [ 1575.641576] env[63024]: _type = "Task" [ 1575.641576] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1575.641890] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950377, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.650951] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52df16e9-559e-f8b9-6cb0-51b0cb98404d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.715352] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1575.715655] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1575.807229] env[63024]: DEBUG nova.compute.manager [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1575.943783] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "refresh_cache-2bfcd5e1-b1d9-4829-bea5-d8c460ceec16" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.943928] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquired lock "refresh_cache-2bfcd5e1-b1d9-4829-bea5-d8c460ceec16" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.944107] env[63024]: DEBUG nova.network.neutron [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1575.953196] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950376, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1575.981260] env[63024]: DEBUG nova.compute.manager [req-4e733658-1cbe-43b3-9a06-af2ba936a3b1 req-c096c0df-4e4a-48a7-80c4-7992eb1d6191 service nova] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Received event network-changed-90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1575.981576] env[63024]: DEBUG nova.compute.manager [req-4e733658-1cbe-43b3-9a06-af2ba936a3b1 req-c096c0df-4e4a-48a7-80c4-7992eb1d6191 service nova] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Refreshing instance network info cache due to event network-changed-90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1575.983822] env[63024]: DEBUG oslo_concurrency.lockutils [req-4e733658-1cbe-43b3-9a06-af2ba936a3b1 req-c096c0df-4e4a-48a7-80c4-7992eb1d6191 service nova] Acquiring lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1575.983822] env[63024]: DEBUG oslo_concurrency.lockutils [req-4e733658-1cbe-43b3-9a06-af2ba936a3b1 req-c096c0df-4e4a-48a7-80c4-7992eb1d6191 service nova] Acquired lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1575.983822] env[63024]: DEBUG nova.network.neutron [req-4e733658-1cbe-43b3-9a06-af2ba936a3b1 req-c096c0df-4e4a-48a7-80c4-7992eb1d6191 service nova] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Refreshing network info cache for port 90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1576.005511] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950373, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.019451] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950371, 'name': ReconfigVM_Task, 'duration_secs': 1.493386} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.019806] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Reconfigured VM instance instance-00000003 to attach disk [datastore1] ccd80e20-9fc2-415a-a428-fcf85994c7f8/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1576.020854] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fa4038-e1d9-4a3b-afa5-f73af6963598 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.052310] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b68913f3-e685-47ec-9df8-87c0c05b718a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.069894] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1576.069894] env[63024]: value = "task-1950378" [ 1576.069894] env[63024]: _type = "Task" [ 1576.069894] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.080296] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950378, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.092456] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950375, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.77931} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.092730] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 22ef5bae-f7bc-43c7-9d77-1b4547e83b24/22ef5bae-f7bc-43c7-9d77-1b4547e83b24.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1576.092938] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1576.094021] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9f65b962-8273-40ca-add7-c68f935567eb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.102498] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1576.102498] env[63024]: value = "task-1950379" [ 1576.102498] env[63024]: _type = "Task" [ 1576.102498] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.111613] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950379, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.132299] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950377, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.157301] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52df16e9-559e-f8b9-6cb0-51b0cb98404d, 'name': SearchDatastore_Task, 'duration_secs': 0.053341} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.157652] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1576.158543] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 61fdfa06-cb40-44a3-8abc-428b26bd40f5/61fdfa06-cb40-44a3-8abc-428b26bd40f5.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1576.158543] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bbcbd56b-9115-4029-b7ce-8ddc4cd62e8d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.168863] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Waiting for the task: (returnval){ [ 1576.168863] env[63024]: value = "task-1950380" [ 1576.168863] env[63024]: _type = "Task" [ 1576.168863] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.178475] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb82b998-764f-43c7-be72-77445e487230 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.185484] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950380, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.190929] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32ec39af-4c85-4f83-afbf-7a5590230a99 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.227876] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea41132-cc7f-4e57-84d4-375d3f43e165 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.236083] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c529851-9ee8-4981-9adf-0989f6efea27 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.254586] env[63024]: DEBUG nova.compute.provider_tree [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1576.365200] env[63024]: DEBUG nova.network.neutron [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Successfully created port: 0b9a5894-831a-4645-8fee-497016b36839 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1576.444769] env[63024]: DEBUG nova.compute.manager [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Received event network-vif-plugged-421d2adb-43a2-41f5-b64d-29989f6a0fa5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1576.444769] env[63024]: DEBUG oslo_concurrency.lockutils [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] Acquiring lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1576.445200] env[63024]: DEBUG oslo_concurrency.lockutils [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1576.445377] env[63024]: DEBUG oslo_concurrency.lockutils [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1576.445582] env[63024]: DEBUG nova.compute.manager [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] No waiting events found dispatching network-vif-plugged-421d2adb-43a2-41f5-b64d-29989f6a0fa5 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1576.445771] env[63024]: WARNING nova.compute.manager [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Received unexpected event network-vif-plugged-421d2adb-43a2-41f5-b64d-29989f6a0fa5 for instance with vm_state building and task_state spawning. [ 1576.445898] env[63024]: DEBUG nova.compute.manager [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Received event network-changed-7744ae27-9eae-4bcd-b5d8-425150caba4f {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1576.446065] env[63024]: DEBUG nova.compute.manager [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Refreshing instance network info cache due to event network-changed-7744ae27-9eae-4bcd-b5d8-425150caba4f. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1576.446258] env[63024]: DEBUG oslo_concurrency.lockutils [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] Acquiring lock "refresh_cache-22ef5bae-f7bc-43c7-9d77-1b4547e83b24" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1576.446447] env[63024]: DEBUG oslo_concurrency.lockutils [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] Acquired lock "refresh_cache-22ef5bae-f7bc-43c7-9d77-1b4547e83b24" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1576.446578] env[63024]: DEBUG nova.network.neutron [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Refreshing network info cache for port 7744ae27-9eae-4bcd-b5d8-425150caba4f {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1576.458583] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950376, 'name': Rename_Task, 'duration_secs': 0.700905} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.465123] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1576.465938] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da2f454c-5e30-487d-b151-a7731cc23b11 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.476049] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Waiting for the task: (returnval){ [ 1576.476049] env[63024]: value = "task-1950381" [ 1576.476049] env[63024]: _type = "Task" [ 1576.476049] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.488337] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950381, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.514826] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950373, 'name': CloneVM_Task} progress is 95%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.580894] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950378, 'name': ReconfigVM_Task, 'duration_secs': 0.257772} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.583847] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1576.583991] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8a677ce-f862-46ed-9aab-70e193b7f897 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.597042] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1576.597042] env[63024]: value = "task-1950382" [ 1576.597042] env[63024]: _type = "Task" [ 1576.597042] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.598506] env[63024]: DEBUG nova.network.neutron [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1576.606172] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950382, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.613759] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950379, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074587} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.616110] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1576.618998] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b855cb5f-37dd-4f37-9bd5-2102ee200db3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.631817] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950377, 'name': ReconfigVM_Task, 'duration_secs': 0.793159} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1576.642772] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Reconfigured VM instance instance-00000005 to attach disk [datastore1] f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df/f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1576.651287] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Reconfiguring VM instance instance-00000006 to attach disk [datastore1] 22ef5bae-f7bc-43c7-9d77-1b4547e83b24/22ef5bae-f7bc-43c7-9d77-1b4547e83b24.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1576.651800] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dcd10b08-6f66-4cbb-a994-5bcfef886802 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.653651] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8f6afd35-f8ad-4126-a716-c6c0ee291311 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.678471] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1576.678471] env[63024]: value = "task-1950384" [ 1576.678471] env[63024]: _type = "Task" [ 1576.678471] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.680010] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1576.680010] env[63024]: value = "task-1950383" [ 1576.680010] env[63024]: _type = "Task" [ 1576.680010] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1576.686269] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950380, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.695864] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950383, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.700102] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950384, 'name': Rename_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1576.778245] env[63024]: ERROR nova.scheduler.client.report [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [req-2fb4fda9-59d3-4428-994e-460ef4633039] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2fb4fda9-59d3-4428-994e-460ef4633039"}]} [ 1576.797637] env[63024]: DEBUG nova.scheduler.client.report [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1576.818448] env[63024]: DEBUG nova.scheduler.client.report [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1576.818778] env[63024]: DEBUG nova.compute.provider_tree [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1576.822522] env[63024]: DEBUG nova.compute.manager [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 
tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1576.832022] env[63024]: DEBUG nova.scheduler.client.report [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1576.854637] env[63024]: DEBUG nova.virt.hardware [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1576.854637] env[63024]: DEBUG nova.virt.hardware [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1576.854637] env[63024]: DEBUG nova.virt.hardware [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1576.854905] env[63024]: DEBUG nova.virt.hardware [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1576.854905] env[63024]: DEBUG nova.virt.hardware [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1576.854905] env[63024]: DEBUG nova.virt.hardware [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1576.854905] env[63024]: DEBUG nova.virt.hardware [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1576.854905] env[63024]: DEBUG nova.virt.hardware [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1576.855080] env[63024]: DEBUG nova.virt.hardware [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1576.855080] env[63024]: DEBUG nova.virt.hardware [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1576.855080] env[63024]: DEBUG nova.virt.hardware [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1576.855598] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d73d86-88a5-4e8e-89b0-0e92338ae2f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.858882] env[63024]: DEBUG nova.network.neutron [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Updated VIF entry in instance network info cache for port 7744ae27-9eae-4bcd-b5d8-425150caba4f. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1576.859328] env[63024]: DEBUG nova.network.neutron [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Updating instance_info_cache with network_info: [{"id": "7744ae27-9eae-4bcd-b5d8-425150caba4f", "address": "fa:16:3e:0c:e4:50", "network": {"id": "f48a8d6b-d27a-400e-952d-435733c93214", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1457082334-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fd1cc46013d64c54a68b24c282ffe48b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7744ae27-9e", "ovs_interfaceid": "7744ae27-9eae-4bcd-b5d8-425150caba4f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1576.861263] env[63024]: DEBUG nova.scheduler.client.report [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1576.872602] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-382f0aad-2611-4d39-863b-d27210815f0e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1576.993998] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950381, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.014598] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950373, 'name': CloneVM_Task, 'duration_secs': 1.97418} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.014892] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Created linked-clone VM from snapshot [ 1577.015826] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af18eb64-24e4-4c46-8142-b9004d00fa98 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.028687] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Uploading image 64e00b8d-8a62-479c-8371-8a7594d98ee0 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1577.038776] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "bd07735a-6a75-45fb-9cef-e1f2c301a489" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1577.040289] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "bd07735a-6a75-45fb-9cef-e1f2c301a489" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1577.064737] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1577.064737] env[63024]: value = "vm-401985" [ 1577.064737] env[63024]: _type = "VirtualMachine" [ 1577.064737] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1577.065069] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-5f4d2ac4-5d47-48c5-87cc-bd804b9c2f26 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.083219] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lease: (returnval){ [ 1577.083219] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5287494d-5332-6064-0ced-32754f97a97b" [ 1577.083219] env[63024]: _type = "HttpNfcLease" [ 1577.083219] env[63024]: } obtained for exporting VM: (result){ [ 1577.083219] env[63024]: value = "vm-401985" [ 1577.083219] env[63024]: _type = "VirtualMachine" [ 1577.083219] env[63024]: }. 
{{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1577.084034] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the lease: (returnval){ [ 1577.084034] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5287494d-5332-6064-0ced-32754f97a97b" [ 1577.084034] env[63024]: _type = "HttpNfcLease" [ 1577.084034] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1577.090628] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1577.090628] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5287494d-5332-6064-0ced-32754f97a97b" [ 1577.090628] env[63024]: _type = "HttpNfcLease" [ 1577.090628] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1577.104716] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950382, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.182457] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950380, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.658148} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.188092] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 61fdfa06-cb40-44a3-8abc-428b26bd40f5/61fdfa06-cb40-44a3-8abc-428b26bd40f5.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1577.188346] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1577.193034] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b98d9fe-1637-451c-bd38-31149ae93a80 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.202992] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950383, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.207073] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950384, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.207716] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Waiting for the task: (returnval){ [ 1577.207716] env[63024]: value = "task-1950386" [ 1577.207716] env[63024]: _type = "Task" [ 1577.207716] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.219607] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950386, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.306925] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-670a3fae-5f03-4e05-b7ba-fef3845dbc84 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.314943] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83b84a7e-c4d7-4e75-bd80-2fb12c3923d4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.351218] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b62eb27-1328-43f1-a33c-d7ca82d1d6a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.359218] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1570f773-1f2c-464d-8ff0-e10d2a40a0e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.375274] env[63024]: DEBUG oslo_concurrency.lockutils [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] Releasing lock "refresh_cache-22ef5bae-f7bc-43c7-9d77-1b4547e83b24" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1577.376756] env[63024]: DEBUG nova.compute.manager [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Received event network-changed-421d2adb-43a2-41f5-b64d-29989f6a0fa5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1577.376756] env[63024]: DEBUG nova.compute.manager [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Refreshing instance network info cache due to event network-changed-421d2adb-43a2-41f5-b64d-29989f6a0fa5. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1577.376756] env[63024]: DEBUG oslo_concurrency.lockutils [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] Acquiring lock "refresh_cache-61fdfa06-cb40-44a3-8abc-428b26bd40f5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1577.376756] env[63024]: DEBUG oslo_concurrency.lockutils [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] Acquired lock "refresh_cache-61fdfa06-cb40-44a3-8abc-428b26bd40f5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1577.376756] env[63024]: DEBUG nova.network.neutron [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Refreshing network info cache for port 421d2adb-43a2-41f5-b64d-29989f6a0fa5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1577.377587] env[63024]: DEBUG nova.compute.provider_tree [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1577.488582] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950381, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.593678] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1577.593678] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5287494d-5332-6064-0ced-32754f97a97b" [ 1577.593678] env[63024]: _type = "HttpNfcLease" [ 1577.593678] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1577.593678] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1577.593678] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5287494d-5332-6064-0ced-32754f97a97b" [ 1577.593678] env[63024]: _type = "HttpNfcLease" [ 1577.593678] env[63024]: }. 
{{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1577.593678] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559b91c1-608a-4eb5-a859-b5dbbff7adac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.608876] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525dbbb7-b010-8452-5577-36d76b588c71/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1577.609119] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525dbbb7-b010-8452-5577-36d76b588c71/disk-0.vmdk for reading. {{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1577.622234] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950382, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.622455] env[63024]: DEBUG nova.network.neutron [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Updating instance_info_cache with network_info: [{"id": "468acd69-27ef-4644-8085-504eba6c7955", "address": "fa:16:3e:87:00:ef", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap468acd69-27", "ovs_interfaceid": "468acd69-27ef-4644-8085-504eba6c7955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.707627] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950384, 'name': Rename_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.710992] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950383, 'name': ReconfigVM_Task, 'duration_secs': 0.536478} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.713789] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Reconfigured VM instance instance-00000006 to attach disk [datastore1] 22ef5bae-f7bc-43c7-9d77-1b4547e83b24/22ef5bae-f7bc-43c7-9d77-1b4547e83b24.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1577.714502] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e955f3d-faaa-4520-91af-415ff2539c8a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.721200] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950386, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070885} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.722548] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1577.722909] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1577.722909] env[63024]: value = "task-1950387" [ 1577.722909] env[63024]: _type = "Task" [ 1577.722909] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.725070] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b94f0a-a7bd-49fd-aa24-ff72e0a8052c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.732784] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d294db18-0e7b-41b3-9bb5-553754f4f0c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.747541] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950387, 'name': Rename_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.755997] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] 61fdfa06-cb40-44a3-8abc-428b26bd40f5/61fdfa06-cb40-44a3-8abc-428b26bd40f5.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1577.759986] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61b77323-d1dc-4f3d-9564-5ab91f461e96 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1577.784746] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Waiting for the task: (returnval){ [ 1577.784746] env[63024]: value = "task-1950388" [ 1577.784746] env[63024]: _type = "Task" [ 1577.784746] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1577.797934] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950388, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1577.814980] env[63024]: DEBUG nova.network.neutron [req-4e733658-1cbe-43b3-9a06-af2ba936a3b1 req-c096c0df-4e4a-48a7-80c4-7992eb1d6191 service nova] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updated VIF entry in instance network info cache for port 90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1577.815228] env[63024]: DEBUG nova.network.neutron [req-4e733658-1cbe-43b3-9a06-af2ba936a3b1 req-c096c0df-4e4a-48a7-80c4-7992eb1d6191 service nova] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance_info_cache with network_info: [{"id": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "address": "fa:16:3e:64:8e:6a", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90fdf5d2-f2", "ovs_interfaceid": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1577.945507] env[63024]: DEBUG nova.scheduler.client.report [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 24 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1577.945720] env[63024]: DEBUG nova.compute.provider_tree [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 24 to 25 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1577.945860] env[63024]: DEBUG nova.compute.provider_tree [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1577.994800] env[63024]: DEBUG oslo_vmware.api [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950381, 'name': PowerOnVM_Task, 'duration_secs': 1.511402} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1577.995979] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1577.996417] env[63024]: INFO nova.compute.manager [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Took 8.42 seconds to spawn the instance on the hypervisor. [ 1577.996750] env[63024]: DEBUG nova.compute.manager [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1577.999050] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c6508b-8f03-4812-a1e0-c8f6023fcdcc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.109459] env[63024]: DEBUG oslo_vmware.api [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950382, 'name': PowerOnVM_Task, 'duration_secs': 1.406596} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.109459] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1578.114126] env[63024]: DEBUG nova.compute.manager [None req-dc6a4a0a-d2b2-46b5-a96c-94dfea34710a tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1578.115862] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392e9986-d0bc-40b2-9930-93bebe90077b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.125750] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Releasing lock "refresh_cache-2bfcd5e1-b1d9-4829-bea5-d8c460ceec16" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.126133] env[63024]: DEBUG nova.compute.manager [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Instance network_info: |[{"id": "468acd69-27ef-4644-8085-504eba6c7955", "address": "fa:16:3e:87:00:ef", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap468acd69-27", "ovs_interfaceid": "468acd69-27ef-4644-8085-504eba6c7955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1578.126981] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:87:00:ef', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '468acd69-27ef-4644-8085-504eba6c7955', 'vif_model': 'vmxnet3'}] 
{{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1578.136901] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Creating folder: Project (c720cdab04804a8390b825059692c3f3). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1578.140082] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-666e87ad-bd6e-49a1-9cc5-12237e2a7a4c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.149128] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Created folder: Project (c720cdab04804a8390b825059692c3f3) in parent group-v401959. [ 1578.149128] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Creating folder: Instances. Parent ref: group-v401986. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1578.149128] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f89d1782-0cec-4381-a920-97e3e35352b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.165085] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Created folder: Instances in parent group-v401986. [ 1578.169100] env[63024]: DEBUG oslo.service.loopingcall [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1578.169100] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1578.169453] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-158b8b0a-4987-4158-80de-16d67796fd9e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.199677] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1578.199677] env[63024]: value = "task-1950391" [ 1578.199677] env[63024]: _type = "Task" [ 1578.199677] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.210172] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950384, 'name': Rename_Task, 'duration_secs': 1.170347} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.210172] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1578.210172] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f48471ff-6e12-4945-9457-34b66049c41d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.215329] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950391, 'name': CreateVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.220829] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1578.220829] env[63024]: value = "task-1950392" [ 1578.220829] env[63024]: _type = "Task" [ 1578.220829] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.230084] env[63024]: DEBUG nova.network.neutron [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Successfully updated port: d962584b-9fa7-4c73-b446-b432b537aafd {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1578.245408] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950392, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.253488] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950387, 'name': Rename_Task, 'duration_secs': 0.262292} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.253872] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1578.254189] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-334cefa0-c718-4b8c-913f-e6348bd87a19 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.263741] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1578.263741] env[63024]: value = "task-1950393" [ 1578.263741] env[63024]: _type = "Task" [ 1578.263741] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.275416] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950393, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.294658] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950388, 'name': ReconfigVM_Task, 'duration_secs': 0.459062} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.295153] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Reconfigured VM instance instance-00000007 to attach disk [datastore1] 61fdfa06-cb40-44a3-8abc-428b26bd40f5/61fdfa06-cb40-44a3-8abc-428b26bd40f5.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1578.295816] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-622d4a53-2a3d-482f-b964-1e7a4c334c9d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.302881] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Waiting for the task: (returnval){ [ 1578.302881] env[63024]: value = "task-1950394" [ 1578.302881] env[63024]: _type = "Task" [ 1578.302881] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.313864] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950394, 'name': Rename_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.318557] env[63024]: DEBUG oslo_concurrency.lockutils [req-4e733658-1cbe-43b3-9a06-af2ba936a3b1 req-c096c0df-4e4a-48a7-80c4-7992eb1d6191 service nova] Releasing lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1578.318648] env[63024]: DEBUG nova.compute.manager [req-4e733658-1cbe-43b3-9a06-af2ba936a3b1 req-c096c0df-4e4a-48a7-80c4-7992eb1d6191 service nova] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Received event network-vif-deleted-9aa976e0-a2e8-4311-adcb-79d911535253 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1578.452920] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.655s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1578.454091] env[63024]: DEBUG nova.compute.manager [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1578.459026] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.177s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1578.459026] env[63024]: INFO nova.compute.claims [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1578.520903] env[63024]: INFO nova.compute.manager [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Took 16.19 seconds to build instance. [ 1578.527099] env[63024]: DEBUG nova.network.neutron [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Updated VIF entry in instance network info cache for port 421d2adb-43a2-41f5-b64d-29989f6a0fa5. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1578.528748] env[63024]: DEBUG nova.network.neutron [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Updating instance_info_cache with network_info: [{"id": "421d2adb-43a2-41f5-b64d-29989f6a0fa5", "address": "fa:16:3e:9e:9f:76", "network": {"id": "ab69012e-45b8-478c-9986-252d2f87d7d9", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1505663829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e2bf9d113204b598844e72d5d49f155", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bed837fa-6b6a-4192-a229-a99426a46065", "external-id": "nsx-vlan-transportzone-954", "segmentation_id": 954, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap421d2adb-43", "ovs_interfaceid": "421d2adb-43a2-41f5-b64d-29989f6a0fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1578.714152] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950391, 'name': CreateVM_Task, 'duration_secs': 0.42561} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.714390] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1578.715065] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.715235] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.715546] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1578.715808] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fd8b1e0-765d-4fae-85c3-dee638ae151c {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.721669] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1578.721669] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523cb744-68d7-5f14-96c7-7b05ddf98c47" [ 1578.721669] env[63024]: _type = "Task" [ 1578.721669] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.739026] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950392, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.739026] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523cb744-68d7-5f14-96c7-7b05ddf98c47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.745761] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Acquiring lock "refresh_cache-e03b8577-9298-4e88-98ea-6258e97db28d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1578.746210] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Acquired lock "refresh_cache-e03b8577-9298-4e88-98ea-6258e97db28d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1578.746551] env[63024]: DEBUG nova.network.neutron [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1578.774200] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950393, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.814175] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950394, 'name': Rename_Task, 'duration_secs': 0.2081} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1578.816303] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1578.816303] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-329dd618-0543-488a-aec6-e0931e3f86e0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1578.822963] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Waiting for the task: (returnval){ [ 1578.822963] env[63024]: value = "task-1950395" [ 1578.822963] env[63024]: _type = "Task" [ 1578.822963] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1578.831963] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950395, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1578.969182] env[63024]: DEBUG nova.compute.utils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1578.973217] env[63024]: DEBUG nova.compute.manager [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1578.973217] env[63024]: DEBUG nova.network.neutron [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1579.033398] env[63024]: DEBUG oslo_concurrency.lockutils [None req-77c7547c-3170-4585-b17e-a1199ee8bed4 tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Lock "51532b8e-4adf-4cc7-b91e-885d7934a7e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.717s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.036807] env[63024]: DEBUG oslo_concurrency.lockutils [req-985f2daa-43d6-4f81-8bcc-78e540099418 req-62b99e50-9cc4-42f7-abfb-6d6002d3cad4 service nova] Releasing lock "refresh_cache-61fdfa06-cb40-44a3-8abc-428b26bd40f5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.118265] env[63024]: DEBUG nova.policy [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e74ec40c04540d098903bdd67db2313', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '86b72c4136c2427785ba0a458352ef78', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1579.245485] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523cb744-68d7-5f14-96c7-7b05ddf98c47, 'name': SearchDatastore_Task, 'duration_secs': 0.020001} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.246582] env[63024]: DEBUG oslo_vmware.api [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950392, 'name': PowerOnVM_Task, 'duration_secs': 0.528844} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.246976] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.247169] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1579.247369] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.247564] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.248021] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1579.248173] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1579.250038] env[63024]: INFO nova.compute.manager [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Took 16.90 seconds to spawn the instance on the hypervisor. 
[ 1579.250038] env[63024]: DEBUG nova.compute.manager [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1579.250038] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dafde623-392e-4a16-92e3-cf4202b601b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.253074] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a3ffbb-77e1-4d7b-8480-c2f740e6d774 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.272910] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1579.273084] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1579.274872] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-452b8dd5-924c-4290-a207-edeb5f3cb9ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.285284] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950393, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.287070] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1579.287070] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5271ed07-baac-ec14-87a8-2d9c11ae592c" [ 1579.287070] env[63024]: _type = "Task" [ 1579.287070] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.300081] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5271ed07-baac-ec14-87a8-2d9c11ae592c, 'name': SearchDatastore_Task, 'duration_secs': 0.01234} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.300081] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e15ca607-2131-4526-869c-dd60c895617a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.311324] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1579.311324] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520ed8d0-42e5-6ea9-0585-e1ed5291c305" [ 1579.311324] env[63024]: _type = "Task" [ 1579.311324] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.320015] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520ed8d0-42e5-6ea9-0585-e1ed5291c305, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.334671] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950395, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.387307] env[63024]: DEBUG nova.network.neutron [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1579.473079] env[63024]: DEBUG nova.compute.manager [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1579.540261] env[63024]: DEBUG nova.compute.manager [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1579.784994] env[63024]: INFO nova.compute.manager [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Took 21.93 seconds to build instance. [ 1579.790187] env[63024]: DEBUG oslo_vmware.api [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950393, 'name': PowerOnVM_Task, 'duration_secs': 1.231164} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.790917] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1579.791247] env[63024]: INFO nova.compute.manager [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Took 15.13 seconds to spawn the instance on the hypervisor. [ 1579.791519] env[63024]: DEBUG nova.compute.manager [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1579.793270] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3b50d2c-b9f6-40f0-bc8b-193dbd8666d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.827253] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520ed8d0-42e5-6ea9-0585-e1ed5291c305, 'name': SearchDatastore_Task, 'duration_secs': 0.012094} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.830707] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1579.830707] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16/2bfcd5e1-b1d9-4829-bea5-d8c460ceec16.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1579.831301] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-93e428a9-58de-4e3f-8c56-72a5b7ac5a0c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.846558] env[63024]: DEBUG oslo_vmware.api [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950395, 'name': PowerOnVM_Task, 'duration_secs': 0.659538} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1579.848454] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1579.849358] env[63024]: INFO nova.compute.manager [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Took 12.88 seconds to spawn the instance on the hypervisor. [ 1579.849704] env[63024]: DEBUG nova.compute.manager [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1579.850320] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1579.850320] env[63024]: value = "task-1950396" [ 1579.850320] env[63024]: _type = "Task" [ 1579.850320] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1579.851587] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8af323a8-eb19-41fe-af31-8f7857ae4485 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.872204] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950396, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1579.875683] env[63024]: DEBUG nova.compute.manager [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Received event network-vif-plugged-468acd69-27ef-4644-8085-504eba6c7955 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1579.875999] env[63024]: DEBUG oslo_concurrency.lockutils [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] Acquiring lock "2bfcd5e1-b1d9-4829-bea5-d8c460ceec16-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1579.876094] env[63024]: DEBUG oslo_concurrency.lockutils [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] Lock "2bfcd5e1-b1d9-4829-bea5-d8c460ceec16-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1579.876258] env[63024]: DEBUG oslo_concurrency.lockutils [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] Lock "2bfcd5e1-b1d9-4829-bea5-d8c460ceec16-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1579.876419] env[63024]: DEBUG nova.compute.manager [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] No waiting events found dispatching network-vif-plugged-468acd69-27ef-4644-8085-504eba6c7955 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1579.876855] env[63024]: WARNING nova.compute.manager [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Received unexpected event network-vif-plugged-468acd69-27ef-4644-8085-504eba6c7955 for instance with vm_state building and task_state spawning. [ 1579.876855] env[63024]: DEBUG nova.compute.manager [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Received event network-changed-468acd69-27ef-4644-8085-504eba6c7955 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1579.876855] env[63024]: DEBUG nova.compute.manager [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Refreshing instance network info cache due to event network-changed-468acd69-27ef-4644-8085-504eba6c7955. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1579.877050] env[63024]: DEBUG oslo_concurrency.lockutils [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] Acquiring lock "refresh_cache-2bfcd5e1-b1d9-4829-bea5-d8c460ceec16" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1579.877218] env[63024]: DEBUG oslo_concurrency.lockutils [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] Acquired lock "refresh_cache-2bfcd5e1-b1d9-4829-bea5-d8c460ceec16" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1579.877376] env[63024]: DEBUG nova.network.neutron [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Refreshing network info cache for port 468acd69-27ef-4644-8085-504eba6c7955 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1579.895492] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e63580-0703-46e1-bf09-cc41e090a916 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.903054] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572cfc02-9149-40ec-b55c-f63ad5392423 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.938395] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cb7c74c-adf5-4e4e-b162-c85ec575b587 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.947232] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a575f17-26ac-4fed-a6fa-f4fa6d634c4a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1579.962045] env[63024]: DEBUG nova.compute.provider_tree [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1580.066685] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1580.293397] env[63024]: DEBUG oslo_concurrency.lockutils [None req-782f892d-4fca-4430-ad62-d9dbd5672124 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.446s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.318048] env[63024]: INFO nova.compute.manager [None 
req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Took 20.30 seconds to build instance. [ 1580.366619] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950396, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.380564] env[63024]: INFO nova.compute.manager [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Took 18.43 seconds to build instance. [ 1580.455870] env[63024]: DEBUG nova.network.neutron [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Updating instance_info_cache with network_info: [{"id": "d962584b-9fa7-4c73-b446-b432b537aafd", "address": "fa:16:3e:7a:de:28", "network": {"id": "7c2acd24-557d-4fb0-bb17-ae985019dd54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1770461600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9860b12ec09944ddacb54f69a18d4c4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd962584b-9f", "ovs_interfaceid": "d962584b-9fa7-4c73-b446-b432b537aafd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1580.465142] env[63024]: DEBUG nova.scheduler.client.report [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1580.488184] env[63024]: DEBUG nova.compute.manager [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: 
de31255d-b82f-4f32-82b2-0a8368fe2510] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1580.534680] env[63024]: DEBUG nova.virt.hardware [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1580.535767] env[63024]: DEBUG nova.virt.hardware [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1580.535767] env[63024]: DEBUG nova.virt.hardware [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1580.535767] env[63024]: DEBUG nova.virt.hardware [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1580.535767] env[63024]: DEBUG nova.virt.hardware [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1580.540664] env[63024]: DEBUG nova.virt.hardware [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1580.540664] env[63024]: DEBUG nova.virt.hardware [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1580.540664] env[63024]: DEBUG nova.virt.hardware [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 
tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1580.540664] env[63024]: DEBUG nova.virt.hardware [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1580.540664] env[63024]: DEBUG nova.virt.hardware [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1580.541203] env[63024]: DEBUG nova.virt.hardware [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1580.541944] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ea590e3-24f7-411d-abf8-e96ab2976863 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.549519] env[63024]: DEBUG nova.network.neutron [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Successfully updated port: 0b9a5894-831a-4645-8fee-497016b36839 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1580.557383] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9eb1663-8520-4428-9d38-4697937efc16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.721933] env[63024]: DEBUG nova.network.neutron [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Successfully created port: 459dce7c-f846-4532-bf5c-5ae83c31b50a {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1580.799433] env[63024]: DEBUG nova.compute.manager [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1580.821610] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2a5ac652-ef98-4d7b-98c1-65955fca95f5 tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lock "22ef5bae-f7bc-43c7-9d77-1b4547e83b24" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.816s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.877556] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950396, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.898919} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1580.878878] env[63024]: DEBUG nova.compute.manager [None req-19436498-6f54-4591-8a36-d207fc9e6e93 tempest-ServerDiagnosticsV248Test-2111448382 tempest-ServerDiagnosticsV248Test-2111448382-project-admin] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1580.879434] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16/2bfcd5e1-b1d9-4829-bea5-d8c460ceec16.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1580.879434] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1580.880919] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0caae2d-4c69-408e-a1ef-c3a280ecfc50 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.884056] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cda2ea94-91ff-4f62-8f38-bbec8dcbcd06 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.889335] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8b109fa-6997-4d20-8b57-c714661f6482 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.951s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.894220] env[63024]: INFO nova.compute.manager [None req-19436498-6f54-4591-8a36-d207fc9e6e93 tempest-ServerDiagnosticsV248Test-2111448382 tempest-ServerDiagnosticsV248Test-2111448382-project-admin] [instance: 
51532b8e-4adf-4cc7-b91e-885d7934a7e8] Retrieving diagnostics [ 1580.896149] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-862f8589-009e-408d-a3fe-d7302e59ba0c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.900993] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1580.900993] env[63024]: value = "task-1950397" [ 1580.900993] env[63024]: _type = "Task" [ 1580.900993] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1580.942047] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950397, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1580.959718] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Releasing lock "refresh_cache-e03b8577-9298-4e88-98ea-6258e97db28d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1580.960635] env[63024]: DEBUG nova.compute.manager [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Instance network_info: |[{"id": "d962584b-9fa7-4c73-b446-b432b537aafd", "address": "fa:16:3e:7a:de:28", "network": {"id": "7c2acd24-557d-4fb0-bb17-ae985019dd54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1770461600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9860b12ec09944ddacb54f69a18d4c4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd962584b-9f", "ovs_interfaceid": "d962584b-9fa7-4c73-b446-b432b537aafd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1580.960996] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 
'fa:16:3e:7a:de:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7bcd9d2d-25c8-41ad-9a4a-93b9029ba993', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd962584b-9fa7-4c73-b446-b432b537aafd', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1580.968305] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Creating folder: Project (9860b12ec09944ddacb54f69a18d4c4c). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1580.968993] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-62aa2a70-810e-4fa1-8005-e04576b75efe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1580.974031] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1580.974398] env[63024]: DEBUG nova.compute.manager [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1580.977288] env[63024]: DEBUG oslo_concurrency.lockutils [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.568s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1580.977510] env[63024]: DEBUG nova.objects.instance [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Lazy-loading 'resources' on Instance uuid f90f35a2-f2ee-45e2-a9e4-afce50f29aa0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1580.988484] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Created folder: Project (9860b12ec09944ddacb54f69a18d4c4c) in parent group-v401959. [ 1580.988484] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Creating folder: Instances. Parent ref: group-v401989. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1580.988682] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-215f24a5-2d66-41aa-bd37-d872f8325114 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.001325] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Created folder: Instances in parent group-v401989. [ 1581.002526] env[63024]: DEBUG oslo.service.loopingcall [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1581.002526] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1581.002526] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a1519146-2066-4e48-8f0c-a8ccb0619b6e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.025363] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1581.025363] env[63024]: value = "task-1950400" [ 1581.025363] env[63024]: _type = "Task" [ 1581.025363] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.037596] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950400, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.057758] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Acquiring lock "refresh_cache-7146277f-2621-4e8f-a14c-49bf4dd052db" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.057758] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Acquired lock "refresh_cache-7146277f-2621-4e8f-a14c-49bf4dd052db" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.057758] env[63024]: DEBUG nova.network.neutron [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1581.078713] env[63024]: DEBUG nova.network.neutron [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Updated VIF entry in instance network info cache for port 468acd69-27ef-4644-8085-504eba6c7955. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1581.079392] env[63024]: DEBUG nova.network.neutron [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Updating instance_info_cache with network_info: [{"id": "468acd69-27ef-4644-8085-504eba6c7955", "address": "fa:16:3e:87:00:ef", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.17", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap468acd69-27", "ovs_interfaceid": "468acd69-27ef-4644-8085-504eba6c7955", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1581.329629] env[63024]: DEBUG nova.compute.manager [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1581.333466] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.396094] env[63024]: DEBUG nova.compute.manager [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1581.416358] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950397, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076431} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1581.416358] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1581.416358] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f44d4d3-8f89-4b0c-b514-75855493e72f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.444912] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Reconfiguring VM instance instance-00000009 to attach disk [datastore1] 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16/2bfcd5e1-b1d9-4829-bea5-d8c460ceec16.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1581.447704] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c49890a3-d255-4251-a19a-215b3de4514a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.473072] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1581.473072] env[63024]: value = "task-1950401" [ 1581.473072] env[63024]: _type = "Task" [ 1581.473072] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1581.483058] env[63024]: DEBUG nova.compute.utils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1581.489448] env[63024]: DEBUG nova.compute.manager [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1581.489645] env[63024]: DEBUG nova.network.neutron [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1581.492197] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950401, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.537517] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950400, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.585223] env[63024]: DEBUG oslo_concurrency.lockutils [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] Releasing lock "refresh_cache-2bfcd5e1-b1d9-4829-bea5-d8c460ceec16" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1581.585223] env[63024]: DEBUG nova.compute.manager [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Received event network-vif-plugged-d962584b-9fa7-4c73-b446-b432b537aafd {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1581.585223] env[63024]: DEBUG oslo_concurrency.lockutils [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] Acquiring lock "e03b8577-9298-4e88-98ea-6258e97db28d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.585223] env[63024]: DEBUG oslo_concurrency.lockutils [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] Lock "e03b8577-9298-4e88-98ea-6258e97db28d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1581.585223] env[63024]: DEBUG oslo_concurrency.lockutils [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] Lock "e03b8577-9298-4e88-98ea-6258e97db28d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1581.585646] env[63024]: DEBUG nova.compute.manager [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] No waiting events found dispatching network-vif-plugged-d962584b-9fa7-4c73-b446-b432b537aafd {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1581.585646] env[63024]: WARNING nova.compute.manager [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Received unexpected event network-vif-plugged-d962584b-9fa7-4c73-b446-b432b537aafd for instance with vm_state building and task_state spawning. 
[ 1581.585646] env[63024]: DEBUG nova.compute.manager [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Received event network-changed-d962584b-9fa7-4c73-b446-b432b537aafd {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1581.585646] env[63024]: DEBUG nova.compute.manager [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Refreshing instance network info cache due to event network-changed-d962584b-9fa7-4c73-b446-b432b537aafd. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1581.585646] env[63024]: DEBUG oslo_concurrency.lockutils [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] Acquiring lock "refresh_cache-e03b8577-9298-4e88-98ea-6258e97db28d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1581.585801] env[63024]: DEBUG oslo_concurrency.lockutils [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] Acquired lock "refresh_cache-e03b8577-9298-4e88-98ea-6258e97db28d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1581.585801] env[63024]: DEBUG nova.network.neutron [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Refreshing network info cache for port d962584b-9fa7-4c73-b446-b432b537aafd {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1581.644723] env[63024]: DEBUG nova.policy [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '35aa83bcc1f74c848d3f6d946f99972b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '142b37c08dcc40e5ab889fa485953fab', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1581.713660] env[63024]: DEBUG nova.network.neutron [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1581.843125] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c137a1f-61b2-417d-a59b-ffd3bc3063f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.851952] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b85087e-7ca1-41d9-be45-eaa4e20f85f4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.889013] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.891345] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5deffdd4-0624-4bff-bee2-d0e6f27c1d3f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.903804] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a036d9b-8445-4360-aa97-c95f150599fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1581.928378] env[63024]: DEBUG nova.compute.provider_tree [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1581.937132] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1581.986102] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950401, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1581.992173] env[63024]: DEBUG nova.compute.manager [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1582.039633] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950400, 'name': CreateVM_Task, 'duration_secs': 0.735302} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.040585] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1582.041432] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.041645] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.042037] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1582.042617] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1d971bf-0709-47b4-927d-3dcceac4467b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.048268] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Waiting for the task: (returnval){ [ 1582.048268] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f2eb77-87e0-8575-c345-7917a3e936fa" [ 1582.048268] env[63024]: _type = "Task" [ 1582.048268] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.058782] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f2eb77-87e0-8575-c345-7917a3e936fa, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.317348] env[63024]: DEBUG nova.network.neutron [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Updating instance_info_cache with network_info: [{"id": "0b9a5894-831a-4645-8fee-497016b36839", "address": "fa:16:3e:16:86:10", "network": {"id": "8df09e32-2cef-4759-a80d-a935973669ba", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-205905598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18ce2b5b54b54a71af0b71ba887dd2a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b9a5894-83", "ovs_interfaceid": "0b9a5894-831a-4645-8fee-497016b36839", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.432563] env[63024]: DEBUG nova.scheduler.client.report [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1582.500176] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950401, 'name': ReconfigVM_Task, 'duration_secs': 0.562341} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.508464] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Reconfigured VM instance instance-00000009 to attach disk [datastore1] 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16/2bfcd5e1-b1d9-4829-bea5-d8c460ceec16.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1582.511736] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9f1ca58-e1a8-4d48-87ed-48c4de53011a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.520425] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1582.520425] env[63024]: value = "task-1950402" [ 1582.520425] env[63024]: _type = "Task" [ 1582.520425] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.530903] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950402, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.560229] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f2eb77-87e0-8575-c345-7917a3e936fa, 'name': SearchDatastore_Task, 'duration_secs': 0.01745} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1582.560553] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.560781] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1582.561095] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1582.561177] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1582.561339] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1582.561588] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ddd851b-3193-4ef3-925c-90f6a1f7c3d2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.571773] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1582.571773] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1582.572077] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d63d579-f14e-46f6-af05-96a56053ff16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.579037] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Waiting for the task: (returnval){ [ 1582.579037] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a4063d-8f42-6407-bdb2-d006aea9c3ca" [ 1582.579037] env[63024]: _type = "Task" [ 1582.579037] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.588968] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a4063d-8f42-6407-bdb2-d006aea9c3ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.825101] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Releasing lock "refresh_cache-7146277f-2621-4e8f-a14c-49bf4dd052db" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1582.825101] env[63024]: DEBUG nova.compute.manager [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Instance network_info: |[{"id": "0b9a5894-831a-4645-8fee-497016b36839", "address": "fa:16:3e:16:86:10", "network": {"id": "8df09e32-2cef-4759-a80d-a935973669ba", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-205905598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18ce2b5b54b54a71af0b71ba887dd2a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b9a5894-83", "ovs_interfaceid": "0b9a5894-831a-4645-8fee-497016b36839", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1582.825519] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 
7146277f-2621-4e8f-a14c-49bf4dd052db] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:86:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '11b669be-fb26-4ef8-bdb6-c77ab9d06daf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0b9a5894-831a-4645-8fee-497016b36839', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1582.836102] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Creating folder: Project (18ce2b5b54b54a71af0b71ba887dd2a4). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1582.836102] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3624fb3e-eb51-442b-a202-449ae967ebcc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.849210] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Created folder: Project (18ce2b5b54b54a71af0b71ba887dd2a4) in parent group-v401959. [ 1582.849432] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Creating folder: Instances. Parent ref: group-v401992. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1582.849692] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fc3f0953-ade4-49cc-ba86-6a75b4c7b6ee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.860968] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Created folder: Instances in parent group-v401992. [ 1582.861079] env[63024]: DEBUG oslo.service.loopingcall [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1582.861429] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1582.861865] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75bc90eb-7919-4f28-8f49-e13b9fe3ea38 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1582.884330] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1582.884330] env[63024]: value = "task-1950405" [ 1582.884330] env[63024]: _type = "Task" [ 1582.884330] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1582.892723] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950405, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1582.933123] env[63024]: DEBUG nova.network.neutron [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Successfully created port: de190f04-f1aa-479a-b49d-4cf36ac4475f {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1582.938075] env[63024]: DEBUG oslo_concurrency.lockutils [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.961s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1582.941544] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.374s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1582.942496] env[63024]: INFO nova.compute.claims [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1582.958914] env[63024]: DEBUG nova.network.neutron [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Updated VIF entry in instance network info cache for port d962584b-9fa7-4c73-b446-b432b537aafd. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1582.959564] env[63024]: DEBUG nova.network.neutron [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Updating instance_info_cache with network_info: [{"id": "d962584b-9fa7-4c73-b446-b432b537aafd", "address": "fa:16:3e:7a:de:28", "network": {"id": "7c2acd24-557d-4fb0-bb17-ae985019dd54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1770461600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9860b12ec09944ddacb54f69a18d4c4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd962584b-9f", "ovs_interfaceid": "d962584b-9fa7-4c73-b446-b432b537aafd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1582.981646] env[63024]: INFO nova.scheduler.client.report [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Deleted allocations for instance f90f35a2-f2ee-45e2-a9e4-afce50f29aa0 [ 1583.004141] env[63024]: DEBUG nova.compute.manager [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1583.037370] env[63024]: DEBUG nova.virt.hardware [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1583.037370] env[63024]: DEBUG nova.virt.hardware [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1583.037370] env[63024]: DEBUG nova.virt.hardware [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1583.037521] env[63024]: DEBUG nova.virt.hardware [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1583.037521] env[63024]: DEBUG nova.virt.hardware [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1583.037521] env[63024]: DEBUG nova.virt.hardware [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1583.037521] env[63024]: DEBUG nova.virt.hardware [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1583.037521] env[63024]: DEBUG nova.virt.hardware [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1583.037681] 
env[63024]: DEBUG nova.virt.hardware [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1583.037681] env[63024]: DEBUG nova.virt.hardware [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1583.037681] env[63024]: DEBUG nova.virt.hardware [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1583.042590] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35c16592-d3ba-4b12-b8b8-09bd48e488c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.046151] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950402, 'name': Rename_Task, 'duration_secs': 0.221806} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.046729] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1583.047584] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c317e0d-99f4-4eae-b66b-718bc1a57843 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.054314] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4213c88f-59b8-4331-bb0b-95d9a8f514e0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.060371] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1583.060371] env[63024]: value = "task-1950406" [ 1583.060371] env[63024]: _type = "Task" [ 1583.060371] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.081817] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950406, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.092063] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a4063d-8f42-6407-bdb2-d006aea9c3ca, 'name': SearchDatastore_Task, 'duration_secs': 0.015048} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.092895] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbdaacc2-4151-4584-bdf2-5ecaf3f2a7bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.099027] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Waiting for the task: (returnval){ [ 1583.099027] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fb484c-f227-3491-1715-7aa2a07f9177" [ 1583.099027] env[63024]: _type = "Task" [ 1583.099027] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.107695] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fb484c-f227-3491-1715-7aa2a07f9177, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.394310] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950405, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.463592] env[63024]: DEBUG oslo_concurrency.lockutils [req-7af53148-1f64-4321-901f-008fbd083132 req-0a030295-c29d-4d36-aa45-2bca38b3a68e service nova] Releasing lock "refresh_cache-e03b8577-9298-4e88-98ea-6258e97db28d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.491322] env[63024]: DEBUG oslo_concurrency.lockutils [None req-988d5bc0-fea9-4cd4-83c5-eae1d9fdd5b7 tempest-DeleteServersAdminTestJSON-2143423831 tempest-DeleteServersAdminTestJSON-2143423831-project-admin] Lock "f90f35a2-f2ee-45e2-a9e4-afce50f29aa0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.384s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1583.579769] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950406, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.611417] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fb484c-f227-3491-1715-7aa2a07f9177, 'name': SearchDatastore_Task, 'duration_secs': 0.034068} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1583.613043] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1583.613043] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e03b8577-9298-4e88-98ea-6258e97db28d/e03b8577-9298-4e88-98ea-6258e97db28d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1583.613043] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-125fad65-9c49-4900-ace6-45b21b884e74 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1583.623518] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Waiting for the task: (returnval){ [ 1583.623518] env[63024]: value = "task-1950407" [ 1583.623518] env[63024]: _type = "Task" [ 1583.623518] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1583.632772] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950407, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1583.675329] env[63024]: DEBUG nova.network.neutron [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Successfully updated port: 459dce7c-f846-4532-bf5c-5ae83c31b50a {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1583.900146] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950405, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.081073] env[63024]: DEBUG oslo_vmware.api [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950406, 'name': PowerOnVM_Task, 'duration_secs': 0.757323} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.082042] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1584.082287] env[63024]: INFO nova.compute.manager [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Took 12.11 seconds to spawn the instance on the hypervisor. [ 1584.082465] env[63024]: DEBUG nova.compute.manager [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1584.083370] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aca7e00-112c-4880-9a99-5d2255543450 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.140144] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950407, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.179422] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Acquiring lock "refresh_cache-de31255d-b82f-4f32-82b2-0a8368fe2510" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.179422] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Acquired lock "refresh_cache-de31255d-b82f-4f32-82b2-0a8368fe2510" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.179570] env[63024]: DEBUG nova.network.neutron [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1584.359633] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7b12bf4-b4ce-4dcb-8816-1ba0462039ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.370949] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7fcd17f-5e37-4b4b-a2b8-11f9c51e611c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.415323] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17e46e0e-58ca-4c64-b8a0-432b7dda0a42 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.423386] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950405, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.426602] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f79396-22e6-4363-af28-f812c33fa5b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.441548] env[63024]: DEBUG nova.compute.provider_tree [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1584.456998] env[63024]: DEBUG nova.compute.manager [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Received event network-vif-plugged-0b9a5894-831a-4645-8fee-497016b36839 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1584.457563] env[63024]: DEBUG oslo_concurrency.lockutils [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] Acquiring lock "7146277f-2621-4e8f-a14c-49bf4dd052db-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.457563] env[63024]: DEBUG oslo_concurrency.lockutils [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] Lock "7146277f-2621-4e8f-a14c-49bf4dd052db-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.457707] env[63024]: DEBUG oslo_concurrency.lockutils [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] Lock "7146277f-2621-4e8f-a14c-49bf4dd052db-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1584.457783] env[63024]: DEBUG nova.compute.manager [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] No waiting events found dispatching network-vif-plugged-0b9a5894-831a-4645-8fee-497016b36839 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1584.458240] env[63024]: WARNING nova.compute.manager [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Received unexpected event network-vif-plugged-0b9a5894-831a-4645-8fee-497016b36839 for instance with vm_state building and task_state spawning. 
[ 1584.458240] env[63024]: DEBUG nova.compute.manager [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Received event network-changed-0b9a5894-831a-4645-8fee-497016b36839 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1584.458240] env[63024]: DEBUG nova.compute.manager [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Refreshing instance network info cache due to event network-changed-0b9a5894-831a-4645-8fee-497016b36839. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1584.458688] env[63024]: DEBUG oslo_concurrency.lockutils [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] Acquiring lock "refresh_cache-7146277f-2621-4e8f-a14c-49bf4dd052db" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.458688] env[63024]: DEBUG oslo_concurrency.lockutils [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] Acquired lock "refresh_cache-7146277f-2621-4e8f-a14c-49bf4dd052db" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.458688] env[63024]: DEBUG nova.network.neutron [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Refreshing network info cache for port 0b9a5894-831a-4645-8fee-497016b36839 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1584.487727] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.487727] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.613134] env[63024]: INFO nova.compute.manager [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Took 19.32 seconds to build instance. 
[ 1584.637373] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "d49eae54-cccb-4281-aaa0-d6974529eb7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1584.638133] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "d49eae54-cccb-4281-aaa0-d6974529eb7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1584.643243] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950407, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1584.732886] env[63024]: DEBUG nova.network.neutron [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1584.917300] env[63024]: DEBUG nova.network.neutron [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Updating instance_info_cache with network_info: [{"id": "459dce7c-f846-4532-bf5c-5ae83c31b50a", "address": "fa:16:3e:3d:f0:31", "network": {"id": "79a790df-9e4c-44c6-96c2-d7ae51fb6ff4", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1176450962-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86b72c4136c2427785ba0a458352ef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap459dce7c-f8", "ovs_interfaceid": "459dce7c-f846-4532-bf5c-5ae83c31b50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1584.927331] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950405, 'name': CreateVM_Task, 'duration_secs': 1.555869} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1584.927516] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1584.928261] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1584.928394] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1584.929663] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1584.929765] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0978dd5b-7847-4242-b6e7-15f169258349 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1584.937720] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Waiting for the task: (returnval){ [ 1584.937720] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5296ad41-8a1c-5dc3-dd45-95a587f9e500" [ 1584.937720] env[63024]: _type = "Task" [ 1584.937720] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1584.944771] env[63024]: DEBUG nova.scheduler.client.report [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1584.952742] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5296ad41-8a1c-5dc3-dd45-95a587f9e500, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.119512] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c3d433fa-e336-4d44-9e21-8c97c7749dc1 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "2bfcd5e1-b1d9-4829-bea5-d8c460ceec16" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.834s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.134725] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950407, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.464359} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.135583] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e03b8577-9298-4e88-98ea-6258e97db28d/e03b8577-9298-4e88-98ea-6258e97db28d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1585.135805] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1585.136062] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1258510a-055d-4d54-b1aa-136d7b5aef04 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.143517] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Waiting for the task: (returnval){ [ 1585.143517] env[63024]: value = "task-1950408" [ 1585.143517] env[63024]: _type = "Task" [ 1585.143517] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.156495] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950408, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.421989] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Releasing lock "refresh_cache-de31255d-b82f-4f32-82b2-0a8368fe2510" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.422706] env[63024]: DEBUG nova.compute.manager [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Instance network_info: |[{"id": "459dce7c-f846-4532-bf5c-5ae83c31b50a", "address": "fa:16:3e:3d:f0:31", "network": {"id": "79a790df-9e4c-44c6-96c2-d7ae51fb6ff4", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1176450962-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86b72c4136c2427785ba0a458352ef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap459dce7c-f8", "ovs_interfaceid": "459dce7c-f846-4532-bf5c-5ae83c31b50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1585.425828] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:f0:31', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '360308f4-9d0a-4ec2-8bcf-44891f452847', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '459dce7c-f846-4532-bf5c-5ae83c31b50a', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1585.438120] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Creating folder: Project (86b72c4136c2427785ba0a458352ef78). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1585.438120] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32924256-7a22-4497-a5f8-1b5c93c7ebdf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.448252] env[63024]: DEBUG nova.network.neutron [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Updated VIF entry in instance network info cache for port 0b9a5894-831a-4645-8fee-497016b36839. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1585.448252] env[63024]: DEBUG nova.network.neutron [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Updating instance_info_cache with network_info: [{"id": "0b9a5894-831a-4645-8fee-497016b36839", "address": "fa:16:3e:16:86:10", "network": {"id": "8df09e32-2cef-4759-a80d-a935973669ba", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-205905598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18ce2b5b54b54a71af0b71ba887dd2a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b9a5894-83", "ovs_interfaceid": "0b9a5894-831a-4645-8fee-497016b36839", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1585.457277] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.517s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1585.457969] env[63024]: DEBUG nova.compute.manager [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1585.461564] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5296ad41-8a1c-5dc3-dd45-95a587f9e500, 'name': SearchDatastore_Task, 'duration_secs': 0.055104} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.463785] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.016s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1585.466342] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.466342] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1585.466342] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1585.466342] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1585.466602] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1585.466602] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Created folder: Project (86b72c4136c2427785ba0a458352ef78) in parent group-v401959. [ 1585.466602] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Creating folder: Instances. Parent ref: group-v401995. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1585.469079] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f167f46-670f-437a-98f3-b5bcd75102f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.469079] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3dc67687-bf56-4107-8d11-bbbb94a1b9a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.482481] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Created folder: Instances in parent group-v401995. [ 1585.482481] env[63024]: DEBUG oslo.service.loopingcall [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1585.482481] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1585.482481] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1585.482481] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1585.482956] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-010fbf6d-f821-444d-9cdb-f2d121a6c3e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.485671] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4b13141a-6bbd-4481-8f95-5a2d9ffd9220 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.515025] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Waiting for the task: (returnval){ [ 1585.515025] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52042024-d950-dcb1-cacd-c85d8bf4036e" [ 1585.515025] env[63024]: _type = "Task" [ 1585.515025] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.515025] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1585.515025] env[63024]: value = "task-1950411" [ 1585.515025] env[63024]: _type = "Task" [ 1585.515025] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.529936] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52042024-d950-dcb1-cacd-c85d8bf4036e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.534255] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950411, 'name': CreateVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.622582] env[63024]: DEBUG nova.compute.manager [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1585.655693] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950408, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08624} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1585.656276] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1585.657304] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5d1748-4ff3-45ae-b931-c6e0f7ea3bf3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.689616] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Reconfiguring VM instance instance-0000000a to attach disk [datastore1] e03b8577-9298-4e88-98ea-6258e97db28d/e03b8577-9298-4e88-98ea-6258e97db28d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1585.690489] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60e915bf-1711-4c20-b73e-3ca26704aab3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1585.712121] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Waiting for the task: (returnval){ [ 1585.712121] env[63024]: value = "task-1950412" [ 1585.712121] env[63024]: _type = "Task" [ 1585.712121] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1585.722689] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950412, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1585.741728] env[63024]: DEBUG nova.network.neutron [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Successfully updated port: de190f04-f1aa-479a-b49d-4cf36ac4475f {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1585.952455] env[63024]: DEBUG oslo_concurrency.lockutils [req-fc61252b-3865-4a94-9a17-793ad3a5c8b9 req-106903fa-1344-4ba0-b905-3b1c8a2ee7f8 service nova] Releasing lock "refresh_cache-7146277f-2621-4e8f-a14c-49bf4dd052db" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1585.964508] env[63024]: DEBUG nova.compute.utils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1585.969332] env[63024]: DEBUG nova.compute.manager [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1585.969616] env[63024]: DEBUG nova.network.neutron [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1586.032354] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52042024-d950-dcb1-cacd-c85d8bf4036e, 'name': SearchDatastore_Task, 'duration_secs': 0.01719} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.036827] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950411, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.037305] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6cf8b826-131d-4601-bfba-0526675af68e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.045647] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Waiting for the task: (returnval){ [ 1586.045647] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bc9bf3-bad1-8b76-4044-6578eac07f57" [ 1586.045647] env[63024]: _type = "Task" [ 1586.045647] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.054652] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bc9bf3-bad1-8b76-4044-6578eac07f57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.130140] env[63024]: DEBUG nova.policy [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afa40a549c0c4807bf5465c2f70e1660', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '851e1e0d455e4f67ba4bfc3e87eca7f7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1586.161506] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1586.195316] env[63024]: DEBUG nova.compute.manager [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1586.195316] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5122c9c3-253c-4c68-99cf-ed09dd261f74 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.226393] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950412, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.247055] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquiring lock "refresh_cache-6e477ec2-9270-42b1-85bd-a315460d9cab" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.247272] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquired lock "refresh_cache-6e477ec2-9270-42b1-85bd-a315460d9cab" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.247512] env[63024]: DEBUG nova.network.neutron [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1586.484021] env[63024]: DEBUG nova.compute.manager [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1586.492030] env[63024]: INFO nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating resource usage from migration 85a4ec44-899c-4937-b93d-0eaabd8ff03f [ 1586.528886] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b0b4d94c-cd5c-4452-baa6-9aeec46b43ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.529279] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance ccd80e20-9fc2-415a-a428-fcf85994c7f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.529726] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b629b4f8-f79f-4361-b78c-8705a6888a9e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.530177] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 22ef5bae-f7bc-43c7-9d77-1b4547e83b24 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.530584] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 61fdfa06-cb40-44a3-8abc-428b26bd40f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.530750] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 51532b8e-4adf-4cc7-b91e-885d7934a7e8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.531566] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.531566] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e03b8577-9298-4e88-98ea-6258e97db28d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.531566] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 7146277f-2621-4e8f-a14c-49bf4dd052db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.531566] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance de31255d-b82f-4f32-82b2-0a8368fe2510 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.531743] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 6e477ec2-9270-42b1-85bd-a315460d9cab actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.531995] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1586.539617] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950411, 'name': CreateVM_Task, 'duration_secs': 0.595307} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.539874] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1586.541500] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1586.541500] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1586.541500] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1586.541937] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d78e9ce-0281-4a2f-915f-89f21ec6e739 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.548650] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Waiting for the task: (returnval){ [ 1586.548650] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5275e51a-1655-545b-7e02-1da26ee49594" [ 1586.548650] env[63024]: _type = "Task" [ 1586.548650] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.563907] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bc9bf3-bad1-8b76-4044-6578eac07f57, 'name': SearchDatastore_Task, 'duration_secs': 0.017061} completed successfully. 
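The Acquiring/Acquired/Releasing triplets around the devstack-image-cache_base path are oslo.concurrency's context-manager locking, used here so only one request fetches or copies the cached image while the others wait. A minimal sketch, assuming the cache path shown above and nothing about the exact Nova call sites:

    from oslo_concurrency import lockutils

    CACHE_VMDK = ("[datastore1] devstack-image-cache_base/"
                  "2646ca61-612e-4bc3-97f7-ee492c048835.vmdk")

    # Serialize work on the cached image; the body would check the datastore,
    # fetch the image if missing, then copy it into the instance directory.
    with lockutils.lock(CACHE_VMDK):
        pass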
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.566658] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1586.567048] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 7146277f-2621-4e8f-a14c-49bf4dd052db/7146277f-2621-4e8f-a14c-49bf4dd052db.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1586.567786] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5275e51a-1655-545b-7e02-1da26ee49594, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.568092] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50331179-9f23-42f1-b0a4-0ee61bab4a89 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.576654] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Waiting for the task: (returnval){ [ 1586.576654] env[63024]: value = "task-1950413" [ 1586.576654] env[63024]: _type = "Task" [ 1586.576654] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.587725] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950413, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.607870] env[63024]: DEBUG nova.compute.manager [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Stashing vm_state: active {{(pid=63024) _prep_resize /opt/stack/nova/nova/compute/manager.py:5954}} [ 1586.709663] env[63024]: INFO nova.compute.manager [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] instance snapshotting [ 1586.712920] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83af44ed-77c3-451c-852f-4d06907f8bf2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.748863] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561d5b55-aadf-4a22-b55d-f4358d378e93 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.754130] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950412, 'name': ReconfigVM_Task, 'duration_secs': 0.684411} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1586.756630] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Reconfigured VM instance instance-0000000a to attach disk [datastore1] e03b8577-9298-4e88-98ea-6258e97db28d/e03b8577-9298-4e88-98ea-6258e97db28d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1586.757920] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0f24a919-1693-48ae-9b8a-940e43327a17 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1586.769955] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Waiting for the task: (returnval){ [ 1586.769955] env[63024]: value = "task-1950414" [ 1586.769955] env[63024]: _type = "Task" [ 1586.769955] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1586.780082] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950414, 'name': Rename_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1586.858425] env[63024]: DEBUG nova.network.neutron [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1587.035855] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 726d9639-1ab4-46a9-975e-5580c8344a37 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1587.063066] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5275e51a-1655-545b-7e02-1da26ee49594, 'name': SearchDatastore_Task, 'duration_secs': 0.014989} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.063369] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1587.063609] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1587.063883] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.063981] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.064196] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1587.064461] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c243ff8f-a17d-4328-b187-8f65ef36399e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.078552] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1587.078552] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1587.078815] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5ee6e8a-cd2f-467a-acca-c429b5525c44 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.098495] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950413, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.100459] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Waiting for the task: (returnval){ [ 1587.100459] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52293536-7bc1-a030-bc01-005e7ca6ca5b" [ 1587.100459] env[63024]: _type = "Task" [ 1587.100459] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.111986] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52293536-7bc1-a030-bc01-005e7ca6ca5b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.139781] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.229638] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Acquiring lock "e3c9e9de-586d-4baa-b4bb-95c41d527a03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.229892] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Lock "e3c9e9de-586d-4baa-b4bb-95c41d527a03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.252360] env[63024]: DEBUG nova.network.neutron [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Successfully created port: 879d1c91-c785-4da7-852e-abd159810127 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1587.268590] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1587.268983] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3b7f449e-dc12-4a67-84ae-f6bce70c406d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.284288] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950414, 'name': Rename_Task, 'duration_secs': 0.213259} completed successfully. 
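Every task-19504xx entry above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, ...) comes from oslo.vmware starting a vCenter task and polling it; wait_for_task emits the "progress is N%" lines at the configured poll interval. A minimal sketch, with hypothetical credentials and the datacenter refs and copy spec omitted:

    from oslo_vmware import api

    # Hypothetical vCenter connection; task_poll_interval drives the progress lines.
    session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    src_vmdk = ("[datastore1] devstack-image-cache_base/"
                "2646ca61-612e-4bc3-97f7-ee492c048835.vmdk")
    dst_vmdk = ("[datastore1] e03b8577-9298-4e88-98ea-6258e97db28d/"
                "e03b8577-9298-4e88-98ea-6258e97db28d.vmdk")

    # Start the copy task, then block until vCenter reports it finished.
    task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                              session.vim.service_content.virtualDiskManager,
                              sourceName=src_vmdk, destName=dst_vmdk)
    session.wait_for_task(task)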
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.286706] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1587.289385] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1587.289385] env[63024]: value = "task-1950415" [ 1587.289385] env[63024]: _type = "Task" [ 1587.289385] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.289385] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e32c6363-9048-4cd7-8ecf-4ff9d87b58a3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.302041] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Waiting for the task: (returnval){ [ 1587.302041] env[63024]: value = "task-1950416" [ 1587.302041] env[63024]: _type = "Task" [ 1587.302041] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.307576] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950415, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.321914] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950416, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.499468] env[63024]: DEBUG nova.compute.manager [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1587.533561] env[63024]: DEBUG nova.virt.hardware [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1587.534275] env[63024]: DEBUG nova.virt.hardware [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1587.534445] env[63024]: DEBUG nova.virt.hardware [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1587.535155] env[63024]: DEBUG nova.virt.hardware [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1587.535155] env[63024]: DEBUG nova.virt.hardware [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1587.535155] env[63024]: DEBUG nova.virt.hardware [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1587.535155] env[63024]: DEBUG nova.virt.hardware [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1587.535497] env[63024]: DEBUG nova.virt.hardware [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1587.535497] env[63024]: DEBUG 
nova.virt.hardware [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1587.535570] env[63024]: DEBUG nova.virt.hardware [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1587.536931] env[63024]: DEBUG nova.virt.hardware [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1587.536931] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a44e13-8d45-4cc6-9297-d17576060496 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.542695] env[63024]: DEBUG nova.network.neutron [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Updating instance_info_cache with network_info: [{"id": "de190f04-f1aa-479a-b49d-4cf36ac4475f", "address": "fa:16:3e:a0:ae:e0", "network": {"id": "13eec5ec-8470-475f-803f-c9b5b84898ef", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1638080157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "142b37c08dcc40e5ab889fa485953fab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde190f04-f1", "ovs_interfaceid": "de190f04-f1aa-479a-b49d-4cf36ac4475f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1587.544917] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 17e1dfa2-b104-4aac-928e-6364da155c3d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1587.555313] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-750a891c-c1c2-4fc5-adb0-f02b5f238693 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.593254] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950413, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.619596] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52293536-7bc1-a030-bc01-005e7ca6ca5b, 'name': SearchDatastore_Task, 'duration_secs': 0.020531} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1587.621447] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95a3dbf4-6e63-43a4-bcfa-cdf72245c143 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1587.631360] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Waiting for the task: (returnval){ [ 1587.631360] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5232902d-f6f7-af6b-71ed-a026b756f778" [ 1587.631360] env[63024]: _type = "Task" [ 1587.631360] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1587.639497] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5232902d-f6f7-af6b-71ed-a026b756f778, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.652019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "85d6db13-d317-498e-a36a-972e9b36e82b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1587.652019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "85d6db13-d317-498e-a36a-972e9b36e82b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1587.805076] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950415, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.817739] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950416, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1587.942846] env[63024]: DEBUG nova.compute.manager [req-493f0ca1-b592-4bba-b66a-ce383fced79c req-a4fa35b8-e91b-41d1-aa9c-3ff8f2110cc1 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Received event network-changed-421d2adb-43a2-41f5-b64d-29989f6a0fa5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1587.943083] env[63024]: DEBUG nova.compute.manager [req-493f0ca1-b592-4bba-b66a-ce383fced79c req-a4fa35b8-e91b-41d1-aa9c-3ff8f2110cc1 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Refreshing instance network info cache due to event network-changed-421d2adb-43a2-41f5-b64d-29989f6a0fa5. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1587.943922] env[63024]: DEBUG oslo_concurrency.lockutils [req-493f0ca1-b592-4bba-b66a-ce383fced79c req-a4fa35b8-e91b-41d1-aa9c-3ff8f2110cc1 service nova] Acquiring lock "refresh_cache-61fdfa06-cb40-44a3-8abc-428b26bd40f5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1587.943922] env[63024]: DEBUG oslo_concurrency.lockutils [req-493f0ca1-b592-4bba-b66a-ce383fced79c req-a4fa35b8-e91b-41d1-aa9c-3ff8f2110cc1 service nova] Acquired lock "refresh_cache-61fdfa06-cb40-44a3-8abc-428b26bd40f5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1587.943922] env[63024]: DEBUG nova.network.neutron [req-493f0ca1-b592-4bba-b66a-ce383fced79c req-a4fa35b8-e91b-41d1-aa9c-3ff8f2110cc1 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Refreshing network info cache for port 421d2adb-43a2-41f5-b64d-29989f6a0fa5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1588.049799] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e2138192-14e0-43d2-9d19-9820747d7217 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1588.054028] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Releasing lock "refresh_cache-6e477ec2-9270-42b1-85bd-a315460d9cab" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.054304] env[63024]: DEBUG nova.compute.manager [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Instance network_info: |[{"id": "de190f04-f1aa-479a-b49d-4cf36ac4475f", "address": "fa:16:3e:a0:ae:e0", "network": {"id": "13eec5ec-8470-475f-803f-c9b5b84898ef", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1638080157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "142b37c08dcc40e5ab889fa485953fab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde190f04-f1", "ovs_interfaceid": "de190f04-f1aa-479a-b49d-4cf36ac4475f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1588.054843] 
env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a0:ae:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '92e4d027-e755-417b-8eea-9a8f24b85140', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'de190f04-f1aa-479a-b49d-4cf36ac4475f', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1588.064732] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Creating folder: Project (142b37c08dcc40e5ab889fa485953fab). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1588.065939] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-138f32f0-a7a3-4bbd-b0ae-7b1bbb15d735 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.077415] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Created folder: Project (142b37c08dcc40e5ab889fa485953fab) in parent group-v401959. [ 1588.077582] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Creating folder: Instances. Parent ref: group-v401998. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1588.079304] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-437d6939-33df-485c-abf8-b8e3829f1582 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.083410] env[63024]: DEBUG nova.compute.manager [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Received event network-vif-plugged-459dce7c-f846-4532-bf5c-5ae83c31b50a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1588.083574] env[63024]: DEBUG oslo_concurrency.lockutils [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] Acquiring lock "de31255d-b82f-4f32-82b2-0a8368fe2510-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.083664] env[63024]: DEBUG oslo_concurrency.lockutils [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] Lock "de31255d-b82f-4f32-82b2-0a8368fe2510-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.083823] env[63024]: DEBUG oslo_concurrency.lockutils [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] Lock "de31255d-b82f-4f32-82b2-0a8368fe2510-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1588.084055] env[63024]: DEBUG nova.compute.manager [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] No waiting events found dispatching network-vif-plugged-459dce7c-f846-4532-bf5c-5ae83c31b50a {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1588.084283] env[63024]: WARNING nova.compute.manager [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Received unexpected event network-vif-plugged-459dce7c-f846-4532-bf5c-5ae83c31b50a for instance with vm_state building and task_state spawning. [ 1588.084889] env[63024]: DEBUG nova.compute.manager [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Received event network-changed-459dce7c-f846-4532-bf5c-5ae83c31b50a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1588.085112] env[63024]: DEBUG nova.compute.manager [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Refreshing instance network info cache due to event network-changed-459dce7c-f846-4532-bf5c-5ae83c31b50a. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1588.085316] env[63024]: DEBUG oslo_concurrency.lockutils [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] Acquiring lock "refresh_cache-de31255d-b82f-4f32-82b2-0a8368fe2510" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1588.085457] env[63024]: DEBUG oslo_concurrency.lockutils [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] Acquired lock "refresh_cache-de31255d-b82f-4f32-82b2-0a8368fe2510" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1588.085611] env[63024]: DEBUG nova.network.neutron [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Refreshing network info cache for port 459dce7c-f846-4532-bf5c-5ae83c31b50a {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1588.101684] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950413, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.106415} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.102086] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 7146277f-2621-4e8f-a14c-49bf4dd052db/7146277f-2621-4e8f-a14c-49bf4dd052db.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1588.103281] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1588.103281] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c88ddf0-902e-47ab-90fb-bc0cb315f334 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.111293] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Waiting for the task: (returnval){ [ 1588.111293] env[63024]: value = "task-1950419" [ 1588.111293] env[63024]: _type = "Task" [ 1588.111293] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.116588] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Created folder: Instances in parent group-v401998. 
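The records above and below all follow the same asynchronous pattern: a vCenter method is invoked (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, CreateVM_Task, ...), the API hands back a Task reference in the "Waiting for the task: (returnval){...}" block, and the compute driver waits for it by polling, logging "progress is N%" while it runs and a duration_secs figure once it completes. The sketch below is a minimal, self-contained illustration of that polling loop only; it is not the oslo.vmware wait_for_task implementation, and the poll callable and its (state, progress) return values are assumptions made for the example.

    import time

    def wait_for_task(poll, interval=0.5, timeout=300.0):
        """poll() -> (state, progress); state is 'running', 'success' or 'error'."""
        start = time.monotonic()
        while time.monotonic() - start < timeout:
            state, progress = poll()
            if state == 'success':
                return time.monotonic() - start        # reported as duration_secs
            if state == 'error':
                raise RuntimeError('task reported an error')
            print(f"progress is {progress}%")          # mirrors the DEBUG poll records
            time.sleep(interval)
        raise TimeoutError('task did not complete in time')

    # Toy usage: a fake task that succeeds on the third poll.
    states = iter([('running', 0), ('running', 51), ('success', 100)])
    print(f"took {wait_for_task(lambda: next(states), interval=0):.3f}s")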
[ 1588.116588] env[63024]: DEBUG oslo.service.loopingcall [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1588.116979] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1588.117443] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9fa312dc-fe62-4fba-9ccf-7346cf175f94 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.139235] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950419, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.151997] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5232902d-f6f7-af6b-71ed-a026b756f778, 'name': SearchDatastore_Task, 'duration_secs': 0.037255} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.151997] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1588.151997] env[63024]: value = "task-1950420" [ 1588.151997] env[63024]: _type = "Task" [ 1588.151997] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.153129] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1588.153129] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] de31255d-b82f-4f32-82b2-0a8368fe2510/de31255d-b82f-4f32-82b2-0a8368fe2510.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1588.153129] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43ab36d5-1c9f-4c86-901a-17bc8fc6697b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.170703] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950420, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.172913] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Waiting for the task: (returnval){ [ 1588.172913] env[63024]: value = "task-1950421" [ 1588.172913] env[63024]: _type = "Task" [ 1588.172913] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.189222] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950421, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.304715] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950415, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.316767] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.317076] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.322816] env[63024]: DEBUG oslo_vmware.api [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950416, 'name': PowerOnVM_Task, 'duration_secs': 0.955222} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.323116] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1588.323411] env[63024]: INFO nova.compute.manager [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Took 13.92 seconds to spawn the instance on the hypervisor. 
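The lock messages around "_locked_do_build_and_run_instance" show how instance builds are serialized: each build takes a lock named after the instance UUID, so builds of different instances (85d6db13-..., b7f26f0e-..., ca1e1e82-...) proceed in parallel while repeated operations on the same instance queue up, and the log reports how long each caller waited for and then held the lock. Below is a simplified analogue using plain threading primitives; it is not the oslo.concurrency lockutils code, and the helper name instance_lock is invented for the example.

    import threading, time
    from collections import defaultdict
    from contextlib import contextmanager

    _locks = defaultdict(threading.Lock)      # one lock per instance UUID

    @contextmanager
    def instance_lock(uuid):
        t_wait = time.monotonic()
        with _locks[uuid]:
            print(f'Lock "{uuid}" acquired :: waited {time.monotonic() - t_wait:.3f}s')
            t_held = time.monotonic()
            try:
                yield
            finally:
                print(f'Lock "{uuid}" released :: held {time.monotonic() - t_held:.3f}s')

    # Toy usage mirroring _locked_do_build_and_run_instance:
    with instance_lock("85d6db13-d317-498e-a36a-972e9b36e82b"):
        pass  # the build/spawn work would run here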
[ 1588.323528] env[63024]: DEBUG nova.compute.manager [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1588.324665] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8576247-b49b-4bac-ac23-0ef30f072347 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.558565] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b765b8b3-a099-4e23-be30-d1178ecffc37 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1588.634310] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950419, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.12831} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1588.634683] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1588.638140] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eaa9541-357a-46aa-b40a-d7b9e57687ad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.681598] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] 7146277f-2621-4e8f-a14c-49bf4dd052db/7146277f-2621-4e8f-a14c-49bf4dd052db.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1588.686362] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2bbdbf71-5b13-440a-8db1-e32a4745a969 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1588.718841] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950421, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.724405] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Waiting for the task: (returnval){ [ 1588.724405] env[63024]: value = "task-1950422" [ 1588.724405] env[63024]: _type = "Task" [ 1588.724405] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1588.724806] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950420, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.739815] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950422, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.751095] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1588.751095] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1588.804544] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950415, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1588.846133] env[63024]: INFO nova.compute.manager [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Took 22.13 seconds to build instance. [ 1589.019304] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525dbbb7-b010-8452-5577-36d76b588c71/disk-0.vmdk. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1589.019958] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383d0c37-bdf6-4cde-9520-741b45d95205 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.026833] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525dbbb7-b010-8452-5577-36d76b588c71/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1589.027022] env[63024]: ERROR oslo_vmware.rw_handles [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525dbbb7-b010-8452-5577-36d76b588c71/disk-0.vmdk due to incomplete transfer. [ 1589.027267] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c6559aea-683c-4fed-8bcc-3418be4a8d08 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.033791] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/525dbbb7-b010-8452-5577-36d76b588c71/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1589.033985] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Uploaded image 64e00b8d-8a62-479c-8371-8a7594d98ee0 to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1589.035993] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1589.036177] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-bcd04bf0-3195-441e-99aa-508945624a6f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.042226] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1589.042226] env[63024]: value = "task-1950423" [ 1589.042226] env[63024]: _type = "Task" [ 1589.042226] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.050380] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950423, 'name': Destroy_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.064350] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1589.132600] env[63024]: DEBUG nova.network.neutron [req-493f0ca1-b592-4bba-b66a-ce383fced79c req-a4fa35b8-e91b-41d1-aa9c-3ff8f2110cc1 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Updated VIF entry in instance network info cache for port 421d2adb-43a2-41f5-b64d-29989f6a0fa5. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1589.134353] env[63024]: DEBUG nova.network.neutron [req-493f0ca1-b592-4bba-b66a-ce383fced79c req-a4fa35b8-e91b-41d1-aa9c-3ff8f2110cc1 service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Updating instance_info_cache with network_info: [{"id": "421d2adb-43a2-41f5-b64d-29989f6a0fa5", "address": "fa:16:3e:9e:9f:76", "network": {"id": "ab69012e-45b8-478c-9986-252d2f87d7d9", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-1505663829-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.190", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "7e2bf9d113204b598844e72d5d49f155", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bed837fa-6b6a-4192-a229-a99426a46065", "external-id": "nsx-vlan-transportzone-954", "segmentation_id": 954, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap421d2adb-43", "ovs_interfaceid": "421d2adb-43a2-41f5-b64d-29989f6a0fa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.180980] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950420, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.197623] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950421, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.708259} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.197887] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] de31255d-b82f-4f32-82b2-0a8368fe2510/de31255d-b82f-4f32-82b2-0a8368fe2510.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1589.198119] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1589.198730] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-baf94a76-1728-4bd7-924d-3b7c4419485c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.206814] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Waiting for the task: (returnval){ [ 1589.206814] env[63024]: value = "task-1950424" [ 1589.206814] env[63024]: _type = "Task" [ 1589.206814] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.215694] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950424, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.237865] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950422, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.252036] env[63024]: DEBUG nova.network.neutron [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Updated VIF entry in instance network info cache for port 459dce7c-f846-4532-bf5c-5ae83c31b50a. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1589.252420] env[63024]: DEBUG nova.network.neutron [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Updating instance_info_cache with network_info: [{"id": "459dce7c-f846-4532-bf5c-5ae83c31b50a", "address": "fa:16:3e:3d:f0:31", "network": {"id": "79a790df-9e4c-44c6-96c2-d7ae51fb6ff4", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1176450962-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "86b72c4136c2427785ba0a458352ef78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "360308f4-9d0a-4ec2-8bcf-44891f452847", "external-id": "nsx-vlan-transportzone-383", "segmentation_id": 383, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap459dce7c-f8", "ovs_interfaceid": "459dce7c-f846-4532-bf5c-5ae83c31b50a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1589.303231] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950415, 'name': CreateSnapshot_Task, 'duration_secs': 1.703684} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.303607] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1589.304338] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa1eac1-cecf-4995-a22d-a427a4cabb0d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.347953] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8582b291-2e51-4e73-ab7d-6861b3765a21 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Lock "e03b8577-9298-4e88-98ea-6258e97db28d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.653s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.553761] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950423, 'name': Destroy_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.567802] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance bd07735a-6a75-45fb-9cef-e1f2c301a489 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1589.632497] env[63024]: DEBUG nova.network.neutron [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Successfully updated port: 879d1c91-c785-4da7-852e-abd159810127 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1589.637956] env[63024]: DEBUG oslo_concurrency.lockutils [req-493f0ca1-b592-4bba-b66a-ce383fced79c req-a4fa35b8-e91b-41d1-aa9c-3ff8f2110cc1 service nova] Releasing lock "refresh_cache-61fdfa06-cb40-44a3-8abc-428b26bd40f5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.678996] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950420, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.715576] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950424, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089562} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.715857] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1589.719466] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036c75ee-7811-4c62-bfe9-f2cace142c0e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.742940] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] de31255d-b82f-4f32-82b2-0a8368fe2510/de31255d-b82f-4f32-82b2-0a8368fe2510.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1589.746265] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2eba98f9-0639-4ce7-bf9d-055ef4c4b960 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.761818] env[63024]: DEBUG oslo_concurrency.lockutils [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] Releasing lock "refresh_cache-de31255d-b82f-4f32-82b2-0a8368fe2510" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1589.761931] env[63024]: DEBUG nova.compute.manager [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Received event network-vif-plugged-de190f04-f1aa-479a-b49d-4cf36ac4475f {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1589.762282] env[63024]: DEBUG oslo_concurrency.lockutils [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] Acquiring lock "6e477ec2-9270-42b1-85bd-a315460d9cab-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1589.762569] env[63024]: DEBUG oslo_concurrency.lockutils [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] Lock "6e477ec2-9270-42b1-85bd-a315460d9cab-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1589.762923] env[63024]: DEBUG oslo_concurrency.lockutils [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] Lock "6e477ec2-9270-42b1-85bd-a315460d9cab-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1589.763246] env[63024]: DEBUG nova.compute.manager 
[req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] No waiting events found dispatching network-vif-plugged-de190f04-f1aa-479a-b49d-4cf36ac4475f {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1589.763432] env[63024]: WARNING nova.compute.manager [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Received unexpected event network-vif-plugged-de190f04-f1aa-479a-b49d-4cf36ac4475f for instance with vm_state building and task_state spawning. [ 1589.763627] env[63024]: DEBUG nova.compute.manager [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Received event network-changed-de190f04-f1aa-479a-b49d-4cf36ac4475f {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1589.763891] env[63024]: DEBUG nova.compute.manager [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Refreshing instance network info cache due to event network-changed-de190f04-f1aa-479a-b49d-4cf36ac4475f. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1589.764121] env[63024]: DEBUG oslo_concurrency.lockutils [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] Acquiring lock "refresh_cache-6e477ec2-9270-42b1-85bd-a315460d9cab" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1589.764261] env[63024]: DEBUG oslo_concurrency.lockutils [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] Acquired lock "refresh_cache-6e477ec2-9270-42b1-85bd-a315460d9cab" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1589.764427] env[63024]: DEBUG nova.network.neutron [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Refreshing network info cache for port de190f04-f1aa-479a-b49d-4cf36ac4475f {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1589.771614] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950422, 'name': ReconfigVM_Task, 'duration_secs': 0.61129} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1589.772707] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Reconfigured VM instance instance-0000000b to attach disk [datastore1] 7146277f-2621-4e8f-a14c-49bf4dd052db/7146277f-2621-4e8f-a14c-49bf4dd052db.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1589.773401] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Waiting for the task: (returnval){ [ 1589.773401] env[63024]: value = "task-1950425" [ 1589.773401] env[63024]: _type = "Task" [ 1589.773401] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.773650] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f0849b95-f1e8-45b0-a52d-87282b953711 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.785760] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950425, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.787942] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Waiting for the task: (returnval){ [ 1589.787942] env[63024]: value = "task-1950426" [ 1589.787942] env[63024]: _type = "Task" [ 1589.787942] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.797727] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950426, 'name': Rename_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.824239] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1589.825197] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-8af73535-7cc9-47a8-ab5d-4df041b21929 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1589.833744] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1589.833744] env[63024]: value = "task-1950427" [ 1589.833744] env[63024]: _type = "Task" [ 1589.833744] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1589.843670] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950427, 'name': CloneVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1589.853886] env[63024]: DEBUG nova.compute.manager [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1590.056019] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950423, 'name': Destroy_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.074259] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9cf45c3a-2a74-4f8e-8817-47bbd748a44b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1590.135637] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Acquiring lock "refresh_cache-04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.135796] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Acquired lock "refresh_cache-04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1590.135981] env[63024]: DEBUG nova.network.neutron [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1590.179430] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950420, 'name': CreateVM_Task, 'duration_secs': 1.559429} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.179652] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1590.180327] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.180595] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1590.180949] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1590.181056] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f426517f-df6a-4c17-9f61-e4182f6db95e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.186077] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Waiting for the task: (returnval){ [ 1590.186077] env[63024]: value = 
"session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b62bad-8a94-2796-e071-38d0593719b2" [ 1590.186077] env[63024]: _type = "Task" [ 1590.186077] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.197696] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b62bad-8a94-2796-e071-38d0593719b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.287321] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950425, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.297636] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950426, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.344320] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950427, 'name': CloneVM_Task} progress is 93%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.381202] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.439433] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.439720] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.567804] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950423, 'name': Destroy_Task, 'duration_secs': 1.394394} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.569416] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Destroyed the VM [ 1590.569416] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1590.569569] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-1eb38f83-90aa-4128-a5d1-3cc12aad4ffb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.578122] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance d49eae54-cccb-4281-aaa0-d6974529eb7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1590.578122] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Migration 85a4ec44-899c-4937-b93d-0eaabd8ff03f is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1590.578122] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1590.578122] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1590.578417] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3008MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1590.582360] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1590.582360] env[63024]: value = "task-1950428" [ 1590.582360] env[63024]: _type = "Task" [ 1590.582360] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.593868] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950428, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.700032] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b62bad-8a94-2796-e071-38d0593719b2, 'name': SearchDatastore_Task, 'duration_secs': 0.018303} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.700362] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1590.700595] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1590.700821] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1590.700964] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1590.701155] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1590.701418] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d93eaada-5860-496d-97af-6e1a044cb02e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.712656] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1590.712856] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1590.713598] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6803b855-698d-436e-9f95-9b5f3036fa8d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.721684] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Waiting for the task: (returnval){ [ 1590.721684] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523f2939-0b62-0b96-5f27-e2d2132ef5e0" [ 1590.721684] env[63024]: _type = "Task" [ 1590.721684] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.730996] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523f2939-0b62-0b96-5f27-e2d2132ef5e0, 'name': SearchDatastore_Task, 'duration_secs': 0.008657} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.731647] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b555ad13-78c6-4389-b777-6ad4aec0dc3c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.739624] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Waiting for the task: (returnval){ [ 1590.739624] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c28f4d-1b29-74b8-ae5a-7882f3a617f1" [ 1590.739624] env[63024]: _type = "Task" [ 1590.739624] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.743559] env[63024]: DEBUG nova.network.neutron [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1590.750931] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c28f4d-1b29-74b8-ae5a-7882f3a617f1, 'name': SearchDatastore_Task, 'duration_secs': 0.008118} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.754189] env[63024]: DEBUG nova.network.neutron [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Updated VIF entry in instance network info cache for port de190f04-f1aa-479a-b49d-4cf36ac4475f. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1590.754531] env[63024]: DEBUG nova.network.neutron [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Updating instance_info_cache with network_info: [{"id": "de190f04-f1aa-479a-b49d-4cf36ac4475f", "address": "fa:16:3e:a0:ae:e0", "network": {"id": "13eec5ec-8470-475f-803f-c9b5b84898ef", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1638080157-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "142b37c08dcc40e5ab889fa485953fab", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "92e4d027-e755-417b-8eea-9a8f24b85140", "external-id": "nsx-vlan-transportzone-756", "segmentation_id": 756, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapde190f04-f1", "ovs_interfaceid": "de190f04-f1aa-479a-b49d-4cf36ac4475f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1590.755709] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1590.756263] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 6e477ec2-9270-42b1-85bd-a315460d9cab/6e477ec2-9270-42b1-85bd-a315460d9cab.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1590.756566] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-62fe1b26-b08c-47a5-b4fb-f044e677847d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.764852] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Waiting for the task: (returnval){ [ 1590.764852] env[63024]: value = "task-1950429" [ 
1590.764852] env[63024]: _type = "Task" [ 1590.764852] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.778304] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950429, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.789512] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950425, 'name': ReconfigVM_Task, 'duration_secs': 0.961658} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.793304] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Reconfigured VM instance instance-0000000c to attach disk [datastore1] de31255d-b82f-4f32-82b2-0a8368fe2510/de31255d-b82f-4f32-82b2-0a8368fe2510.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1590.793950] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de380edf-5924-40ac-ade6-d894258bd859 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.800782] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950426, 'name': Rename_Task, 'duration_secs': 0.924152} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1590.802144] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1590.802467] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Waiting for the task: (returnval){ [ 1590.802467] env[63024]: value = "task-1950430" [ 1590.802467] env[63024]: _type = "Task" [ 1590.802467] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.804917] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9de29481-a175-4296-94e8-c8faf4f1aa55 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1590.815243] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950430, 'name': Rename_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.816568] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Waiting for the task: (returnval){ [ 1590.816568] env[63024]: value = "task-1950431" [ 1590.816568] env[63024]: _type = "Task" [ 1590.816568] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1590.831301] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950431, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.847273] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950427, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1590.965513] env[63024]: DEBUG nova.compute.manager [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Received event network-vif-plugged-879d1c91-c785-4da7-852e-abd159810127 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1590.965815] env[63024]: DEBUG oslo_concurrency.lockutils [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] Acquiring lock "04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1590.965815] env[63024]: DEBUG oslo_concurrency.lockutils [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] Lock "04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1590.966107] env[63024]: DEBUG oslo_concurrency.lockutils [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] Lock "04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1590.966888] env[63024]: DEBUG nova.compute.manager [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] No waiting events found dispatching network-vif-plugged-879d1c91-c785-4da7-852e-abd159810127 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1590.966888] env[63024]: WARNING nova.compute.manager [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Received unexpected event network-vif-plugged-879d1c91-c785-4da7-852e-abd159810127 for instance with vm_state 
building and task_state spawning. [ 1590.966888] env[63024]: DEBUG nova.compute.manager [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Received event network-changed-879d1c91-c785-4da7-852e-abd159810127 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1590.966888] env[63024]: DEBUG nova.compute.manager [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Refreshing instance network info cache due to event network-changed-879d1c91-c785-4da7-852e-abd159810127. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1590.966888] env[63024]: DEBUG oslo_concurrency.lockutils [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] Acquiring lock "refresh_cache-04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1591.011560] env[63024]: DEBUG nova.network.neutron [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Updating instance_info_cache with network_info: [{"id": "879d1c91-c785-4da7-852e-abd159810127", "address": "fa:16:3e:d0:a4:cd", "network": {"id": "534b669b-9055-4679-81c0-841e6bd2ca48", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-851747787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "851e1e0d455e4f67ba4bfc3e87eca7f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap879d1c91-c7", "ovs_interfaceid": "879d1c91-c785-4da7-852e-abd159810127", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1591.060068] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58adde4-e62b-4730-83c1-896c12b84b6b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.070021] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e8b1b06-dff6-468a-91b1-e252f577d0b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.112458] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07479c43-3bbc-48b6-915e-6d587e4075e5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.126700] env[63024]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a84b024-5846-4686-b3c1-54e0eea1a821 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.131509] env[63024]: DEBUG oslo_vmware.api [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950428, 'name': RemoveSnapshot_Task, 'duration_secs': 0.389508} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.131870] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1591.132210] env[63024]: INFO nova.compute.manager [None req-c48082a4-eda9-4a2f-b335-d346cf978e89 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Took 18.81 seconds to snapshot the instance on the hypervisor. [ 1591.149310] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1591.260348] env[63024]: DEBUG oslo_concurrency.lockutils [req-f396fed8-e73e-4624-8bd0-126dc2e59c55 req-dda8d1b8-d891-4a1a-bdf1-e2fe7d127844 service nova] Releasing lock "refresh_cache-6e477ec2-9270-42b1-85bd-a315460d9cab" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.275798] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950429, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.317219] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950430, 'name': Rename_Task, 'duration_secs': 0.158079} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.317649] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1591.321252] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-91b82e6e-5195-453d-af60-180190eb7d7d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.328285] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950431, 'name': PowerOnVM_Task} progress is 71%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.329581] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Waiting for the task: (returnval){ [ 1591.329581] env[63024]: value = "task-1950432" [ 1591.329581] env[63024]: _type = "Task" [ 1591.329581] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.336762] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950432, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.344495] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950427, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.517735] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Releasing lock "refresh_cache-04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1591.517998] env[63024]: DEBUG nova.compute.manager [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Instance network_info: |[{"id": "879d1c91-c785-4da7-852e-abd159810127", "address": "fa:16:3e:d0:a4:cd", "network": {"id": "534b669b-9055-4679-81c0-841e6bd2ca48", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-851747787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "851e1e0d455e4f67ba4bfc3e87eca7f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap879d1c91-c7", "ovs_interfaceid": "879d1c91-c785-4da7-852e-abd159810127", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1591.518321] env[63024]: DEBUG oslo_concurrency.lockutils [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] Acquired lock "refresh_cache-04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1591.518498] env[63024]: DEBUG nova.network.neutron [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Refreshing network info cache for port 879d1c91-c785-4da7-852e-abd159810127 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1591.519806] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d0:a4:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '863474bc-a24a-4823-828c-580a187829e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '879d1c91-c785-4da7-852e-abd159810127', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1591.527238] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 
tempest-ServersTestFqdnHostnames-354726092-project-member] Creating folder: Project (851e1e0d455e4f67ba4bfc3e87eca7f7). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1591.527762] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c62a3154-3626-4be8-8712-e7c936224e7f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.538845] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Created folder: Project (851e1e0d455e4f67ba4bfc3e87eca7f7) in parent group-v401959. [ 1591.539039] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Creating folder: Instances. Parent ref: group-v402003. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1591.539282] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8d19e956-7f18-4ade-a69e-be5400fce3c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.548604] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Created folder: Instances in parent group-v402003. [ 1591.548871] env[63024]: DEBUG oslo.service.loopingcall [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1591.549029] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1591.549242] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9efad0b0-6c66-4241-958b-a05b1737b76c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.568659] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1591.568659] env[63024]: value = "task-1950435" [ 1591.568659] env[63024]: _type = "Task" [ 1591.568659] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.576625] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950435, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.656038] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1591.776957] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950429, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.554935} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.778052] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 6e477ec2-9270-42b1-85bd-a315460d9cab/6e477ec2-9270-42b1-85bd-a315460d9cab.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1591.778052] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1591.778052] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c14480c4-0065-403d-ae24-f1720de75122 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.784791] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Waiting for the task: (returnval){ [ 1591.784791] env[63024]: value = "task-1950436" [ 1591.784791] env[63024]: _type = "Task" [ 1591.784791] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1591.799083] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950436, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.832507] env[63024]: DEBUG oslo_vmware.api [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950431, 'name': PowerOnVM_Task, 'duration_secs': 0.830812} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.837796] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1591.838048] env[63024]: INFO nova.compute.manager [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Took 15.02 seconds to spawn the instance on the hypervisor. [ 1591.838277] env[63024]: DEBUG nova.compute.manager [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1591.839377] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32de6f6a-9c9c-44a4-9884-49645638a991 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.853524] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950432, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1591.857952] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950427, 'name': CloneVM_Task, 'duration_secs': 1.722951} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1591.861646] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Created linked-clone VM from snapshot [ 1591.865020] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db583561-5949-4978-b129-85842c96393b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.870987] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Uploading image e81db031-aecd-4d46-b472-c5cb3d8c71b0 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1591.905568] env[63024]: DEBUG oslo_vmware.rw_handles [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1591.905568] env[63024]: value = "vm-402002" [ 1591.905568] env[63024]: _type = "VirtualMachine" [ 1591.905568] env[63024]: }. 
{{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1591.907226] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6930b980-c21d-4410-8dae-9a255deca606 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1591.916020] env[63024]: DEBUG oslo_vmware.rw_handles [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lease: (returnval){ [ 1591.916020] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cd8558-f78e-0e98-3e88-574466a10c1a" [ 1591.916020] env[63024]: _type = "HttpNfcLease" [ 1591.916020] env[63024]: } obtained for exporting VM: (result){ [ 1591.916020] env[63024]: value = "vm-402002" [ 1591.916020] env[63024]: _type = "VirtualMachine" [ 1591.916020] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1591.916020] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the lease: (returnval){ [ 1591.916020] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cd8558-f78e-0e98-3e88-574466a10c1a" [ 1591.916020] env[63024]: _type = "HttpNfcLease" [ 1591.916020] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1591.923461] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1591.923461] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cd8558-f78e-0e98-3e88-574466a10c1a" [ 1591.923461] env[63024]: _type = "HttpNfcLease" [ 1591.923461] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1592.078415] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950435, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.159653] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1592.160089] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 6.697s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.163408] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.098s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.165122] env[63024]: INFO nova.compute.claims [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1592.294431] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950436, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06848} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.294731] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1592.295568] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5918b7cd-1cca-4383-a9f4-0cb2f6c75965 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.328253] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] 6e477ec2-9270-42b1-85bd-a315460d9cab/6e477ec2-9270-42b1-85bd-a315460d9cab.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1592.331302] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c67be8b-5730-4d52-80d8-3f38ed20f218 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.360754] env[63024]: DEBUG oslo_vmware.api [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950432, 'name': PowerOnVM_Task, 'duration_secs': 0.767983} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1592.362067] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1592.362274] env[63024]: INFO nova.compute.manager [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Took 11.87 seconds to spawn the instance on the hypervisor. [ 1592.362475] env[63024]: DEBUG nova.compute.manager [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1592.366653] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Waiting for the task: (returnval){ [ 1592.366653] env[63024]: value = "task-1950438" [ 1592.366653] env[63024]: _type = "Task" [ 1592.366653] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1592.370545] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5210fb14-e0e4-4c16-9bee-f5559b6dfc77 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.375211] env[63024]: INFO nova.compute.manager [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Took 23.34 seconds to build instance. [ 1592.385885] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950438, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.425377] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1592.425377] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cd8558-f78e-0e98-3e88-574466a10c1a" [ 1592.425377] env[63024]: _type = "HttpNfcLease" [ 1592.425377] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1592.425377] env[63024]: DEBUG oslo_vmware.rw_handles [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1592.425377] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cd8558-f78e-0e98-3e88-574466a10c1a" [ 1592.425377] env[63024]: _type = "HttpNfcLease" [ 1592.425377] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1592.425711] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315a60ee-1a1d-4d26-b2ce-7dc74156fe9b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.434207] env[63024]: DEBUG oslo_vmware.rw_handles [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52059687-d3e9-60dc-0274-d7847d0f2d1b/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1592.434391] env[63024]: DEBUG oslo_vmware.rw_handles [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52059687-d3e9-60dc-0274-d7847d0f2d1b/disk-0.vmdk for reading. {{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1592.494448] env[63024]: DEBUG nova.network.neutron [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Updated VIF entry in instance network info cache for port 879d1c91-c785-4da7-852e-abd159810127. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1592.494448] env[63024]: DEBUG nova.network.neutron [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Updating instance_info_cache with network_info: [{"id": "879d1c91-c785-4da7-852e-abd159810127", "address": "fa:16:3e:d0:a4:cd", "network": {"id": "534b669b-9055-4679-81c0-841e6bd2ca48", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-851747787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "851e1e0d455e4f67ba4bfc3e87eca7f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap879d1c91-c7", "ovs_interfaceid": "879d1c91-c785-4da7-852e-abd159810127", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1592.500976] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Acquiring lock "00e925a1-9b79-46e2-b7f7-c0b63e1e72df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1592.500976] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Lock "00e925a1-9b79-46e2-b7f7-c0b63e1e72df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1592.555655] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7a43f97e-b381-423c-ad9d-b28efa5d85fb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.583645] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950435, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.751480] env[63024]: DEBUG nova.compute.manager [None req-1af3391a-e282-47a1-ad2e-a26f6a8dc830 tempest-ServerDiagnosticsV248Test-2111448382 tempest-ServerDiagnosticsV248Test-2111448382-project-admin] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1592.752955] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a81d5c-8630-4f76-a361-a7b661a4b1e9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.760565] env[63024]: INFO nova.compute.manager [None req-1af3391a-e282-47a1-ad2e-a26f6a8dc830 tempest-ServerDiagnosticsV248Test-2111448382 tempest-ServerDiagnosticsV248Test-2111448382-project-admin] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Retrieving diagnostics [ 1592.761382] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e147a68-76d6-482d-b825-cb493d5d5c9c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1592.877412] env[63024]: DEBUG oslo_concurrency.lockutils [None req-228e2cb7-67a4-4a40-b23b-8468bad58c3a tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Lock "7146277f-2621-4e8f-a14c-49bf4dd052db" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.850s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1592.885189] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950438, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1592.895262] env[63024]: INFO nova.compute.manager [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Took 23.26 seconds to build instance. [ 1593.000656] env[63024]: DEBUG oslo_concurrency.lockutils [req-f671b784-73bf-4e39-ad66-f41fa2dccaad req-cf644922-677f-43e1-9a8c-f9c2278e0b52 service nova] Releasing lock "refresh_cache-04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1593.080748] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950435, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.166774] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1593.167846] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1593.168520] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1593.168651] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 1593.383272] env[63024]: DEBUG nova.compute.manager [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1593.392230] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950438, 'name': ReconfigVM_Task, 'duration_secs': 0.71386} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.392709] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Reconfigured VM instance instance-0000000d to attach disk [datastore1] 6e477ec2-9270-42b1-85bd-a315460d9cab/6e477ec2-9270-42b1-85bd-a315460d9cab.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1593.393389] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8def68de-bd32-492c-a1d4-8c93ff802807 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.396813] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbd67a4c-deb6-462d-babb-49808beaa544 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Lock "de31255d-b82f-4f32-82b2-0a8368fe2510" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.771s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1593.399822] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Waiting for the task: (returnval){ [ 1593.399822] env[63024]: value = "task-1950439" [ 1593.399822] env[63024]: _type = "Task" [ 1593.399822] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.418949] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950439, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.587980] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950435, 'name': CreateVM_Task, 'duration_secs': 1.773871} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.588223] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1593.588902] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.589075] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.589447] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1593.589828] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43774ab1-4d14-4cde-b547-637355501c8c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.602460] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Waiting for the task: (returnval){ [ 1593.602460] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523c717b-3a15-8232-43a7-1bcd626a0268" [ 1593.602460] env[63024]: _type = "Task" [ 1593.602460] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.618086] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523c717b-3a15-8232-43a7-1bcd626a0268, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1593.677575] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Skipping network cache update for instance because it is Building. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10271}} [ 1593.678352] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Skipping network cache update for instance because it is Building. 
{{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10271}} [ 1593.678352] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Skipping network cache update for instance because it is Building. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10271}} [ 1593.694272] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37000680-9e2f-40ec-a628-d474bf52ccae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.702310] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4684028d-b0cc-4304-9c6d-5b1a8f40a52f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.737213] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8de02dc-70dc-4feb-b3a7-f4c73549a753 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.745595] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c4cf56c-6efd-4293-861e-45f75de95508 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.760664] env[63024]: DEBUG nova.compute.provider_tree [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1593.775833] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "refresh_cache-b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1593.776157] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquired lock "refresh_cache-b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1593.776402] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Forcefully refreshing network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1593.776663] env[63024]: DEBUG nova.objects.instance [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lazy-loading 'info_cache' on Instance uuid b0b4d94c-cd5c-4452-baa6-9aeec46b43ad {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1593.903035] env[63024]: DEBUG nova.compute.manager [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 
tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1593.914720] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1593.919557] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950439, 'name': Rename_Task, 'duration_secs': 0.260488} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1593.919886] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1593.920539] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dd263cb8-a59b-4c13-b284-f30d01df9588 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.926780] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Waiting for the task: (returnval){ [ 1593.926780] env[63024]: value = "task-1950440" [ 1593.926780] env[63024]: _type = "Task" [ 1593.926780] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.935088] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950440, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.114034] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523c717b-3a15-8232-43a7-1bcd626a0268, 'name': SearchDatastore_Task, 'duration_secs': 0.015775} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.114034] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1594.114034] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1594.114491] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1594.114491] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1594.114491] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1594.114802] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac60905d-ff44-47ae-b139-3035be628e47 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.127244] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1594.127244] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1594.127244] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d23bc833-7b0c-46db-861e-9c79b9fb4c31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.131511] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Waiting for the task: (returnval){ [ 1594.131511] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521636c3-2727-a542-6d4d-9f1f66b4a7c6" [ 1594.131511] env[63024]: _type = "Task" [ 1594.131511] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.141906] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521636c3-2727-a542-6d4d-9f1f66b4a7c6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.289431] env[63024]: ERROR nova.scheduler.client.report [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [req-957aefba-ff5d-4951-867a-badf19facb07] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-957aefba-ff5d-4951-867a-badf19facb07"}]} [ 1594.310033] env[63024]: DEBUG nova.scheduler.client.report [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1594.324992] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.325274] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.325667] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.325667] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1594.325841] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1594.329898] env[63024]: INFO nova.compute.manager [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Terminating instance [ 1594.338614] env[63024]: DEBUG nova.scheduler.client.report [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1594.338614] env[63024]: DEBUG nova.compute.provider_tree [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1594.353454] env[63024]: DEBUG nova.scheduler.client.report [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1594.377345] env[63024]: DEBUG nova.scheduler.client.report [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1594.433705] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1594.437142] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950440, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.646184] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521636c3-2727-a542-6d4d-9f1f66b4a7c6, 'name': SearchDatastore_Task, 'duration_secs': 0.020632} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.647229] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1fbe370-5ade-45f4-9f71-fabb41084256 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.654122] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Waiting for the task: (returnval){ [ 1594.654122] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b588a1-042d-a906-2d9f-c6f51b052159" [ 1594.654122] env[63024]: _type = "Task" [ 1594.654122] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.664611] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b588a1-042d-a906-2d9f-c6f51b052159, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.843590] env[63024]: DEBUG nova.compute.manager [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1594.843875] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1594.845648] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-901811c6-38a7-4ec9-b33f-bcdb58dec242 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.856526] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1594.857887] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8c08ce7-4a47-4a76-a737-d89e1727eeb1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.860154] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25491ed6-4145-41bc-b57b-c3cac19bd419 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.869833] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e96f6d08-07da-49fb-aa8d-7afbf7e374f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.907772] env[63024]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804de422-0eee-4ce5-aa6e-ce0496b19821 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.916249] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79f35d50-69cd-4770-bac5-946809d5b74e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.935274] env[63024]: DEBUG nova.compute.provider_tree [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1594.946139] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950440, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.164448] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b588a1-042d-a906-2d9f-c6f51b052159, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.343801] env[63024]: DEBUG oslo_concurrency.lockutils [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Acquiring lock "51532b8e-4adf-4cc7-b91e-885d7934a7e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.343801] env[63024]: DEBUG oslo_concurrency.lockutils [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Lock "51532b8e-4adf-4cc7-b91e-885d7934a7e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.343801] env[63024]: DEBUG oslo_concurrency.lockutils [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Acquiring lock "51532b8e-4adf-4cc7-b91e-885d7934a7e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1595.343801] env[63024]: DEBUG oslo_concurrency.lockutils [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Lock "51532b8e-4adf-4cc7-b91e-885d7934a7e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1595.344121] env[63024]: DEBUG oslo_concurrency.lockutils [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Lock "51532b8e-4adf-4cc7-b91e-885d7934a7e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1595.344696] env[63024]: INFO nova.compute.manager [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Terminating instance [ 1595.449801] env[63024]: DEBUG oslo_vmware.api [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950440, 'name': PowerOnVM_Task, 'duration_secs': 1.426356} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1595.450356] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1595.450356] env[63024]: INFO nova.compute.manager [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Took 12.45 seconds to spawn the instance on the hypervisor. [ 1595.453720] env[63024]: DEBUG nova.compute.manager [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1595.455137] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fca16af-22cc-475f-99fb-47912b882e6e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.493246] env[63024]: DEBUG nova.scheduler.client.report [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 37 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1595.494814] env[63024]: DEBUG nova.compute.provider_tree [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 37 to 38 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1595.494814] env[63024]: DEBUG nova.compute.provider_tree [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1595.666530] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': 
session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b588a1-042d-a906-2d9f-c6f51b052159, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1595.849366] env[63024]: DEBUG oslo_concurrency.lockutils [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Acquiring lock "refresh_cache-51532b8e-4adf-4cc7-b91e-885d7934a7e8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1595.849584] env[63024]: DEBUG oslo_concurrency.lockutils [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Acquired lock "refresh_cache-51532b8e-4adf-4cc7-b91e-885d7934a7e8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1595.849758] env[63024]: DEBUG nova.network.neutron [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1595.871980] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Updating instance_info_cache with network_info: [{"id": "07391e4a-67fc-4ff7-8491-8ca4469c68e9", "address": "fa:16:3e:54:a1:06", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap07391e4a-67", "ovs_interfaceid": "07391e4a-67fc-4ff7-8491-8ca4469c68e9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.978204] env[63024]: INFO nova.compute.manager [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Took 25.72 seconds to build instance. 
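
The ERROR earlier in this stretch (req-957aefba-ff5d-4951-867a-badf19facb07: "Failed to update inventory ... Got 409 ... placement.concurrent_update"), followed by "Refreshing inventories" and finally "Updating resource provider ... generation from 37 to 38 during operation: update_inventory", is Placement's optimistic-concurrency scheme at work: every inventory write carries the provider generation the writer last saw, and a stale generation is rejected so the client must re-read and retry. The snippet below is only a minimal sketch of that retry pattern against the Placement REST API, not nova's scheduler report client; the endpoint URL, token, microversion, and retry count are illustrative assumptions, not values taken from this log.

```python
# Minimal sketch: generation-guarded inventory update with retry on HTTP 409
# ("placement.concurrent_update"), mirroring the behaviour visible in the log.
import requests

PLACEMENT = "http://placement.example.test"            # assumed endpoint
HEADERS = {"X-Auth-Token": "ADMIN_TOKEN",               # assumed credentials
           "OpenStack-API-Version": "placement 1.26"}   # assumed microversion

def put_inventory(provider_uuid, inventories, max_retries=3):
    """PUT inventories, refreshing the provider generation on 409 conflicts."""
    url = f"{PLACEMENT}/resource_providers/{provider_uuid}/inventories"
    for _ in range(max_retries):
        # Read the provider's current generation before writing.
        current = requests.get(url, headers=HEADERS)
        current.raise_for_status()
        gen = current.json()["resource_provider_generation"]

        body = {"resource_provider_generation": gen, "inventories": inventories}
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409: another writer bumped the generation first (the "resource
        # provider generation conflict" in the log); loop and retry with
        # the refreshed value.
    raise RuntimeError("inventory update kept conflicting; giving up")
```

The generation check is what prevents two concurrent writers (here, two resource-tracker updates racing for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b) from silently overwriting each other's inventory: the loser gets the 409 and retries against the newer generation, which is exactly the 37 -> 38 bump recorded above.
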
[ 1596.000952] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.837s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.001547] env[63024]: DEBUG nova.compute.manager [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1596.005037] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.672s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.008136] env[63024]: INFO nova.compute.claims [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1596.114973] env[63024]: DEBUG nova.compute.manager [req-b8ff5cdc-62bf-487e-91d5-08f8c090f33e req-fc3f784c-80b3-4d6b-bad1-356ca92c1333 service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Received event network-changed-d962584b-9fa7-4c73-b446-b432b537aafd {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1596.115345] env[63024]: DEBUG nova.compute.manager [req-b8ff5cdc-62bf-487e-91d5-08f8c090f33e req-fc3f784c-80b3-4d6b-bad1-356ca92c1333 service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Refreshing instance network info cache due to event network-changed-d962584b-9fa7-4c73-b446-b432b537aafd. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1596.115590] env[63024]: DEBUG oslo_concurrency.lockutils [req-b8ff5cdc-62bf-487e-91d5-08f8c090f33e req-fc3f784c-80b3-4d6b-bad1-356ca92c1333 service nova] Acquiring lock "refresh_cache-e03b8577-9298-4e88-98ea-6258e97db28d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1596.115788] env[63024]: DEBUG oslo_concurrency.lockutils [req-b8ff5cdc-62bf-487e-91d5-08f8c090f33e req-fc3f784c-80b3-4d6b-bad1-356ca92c1333 service nova] Acquired lock "refresh_cache-e03b8577-9298-4e88-98ea-6258e97db28d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1596.116069] env[63024]: DEBUG nova.network.neutron [req-b8ff5cdc-62bf-487e-91d5-08f8c090f33e req-fc3f784c-80b3-4d6b-bad1-356ca92c1333 service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Refreshing network info cache for port d962584b-9fa7-4c73-b446-b432b537aafd {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1596.165536] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b588a1-042d-a906-2d9f-c6f51b052159, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.377516] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Releasing lock "refresh_cache-b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.377738] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Updated the network info_cache for instance {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10329}} [ 1596.377956] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.378600] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.378829] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.378963] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1596.381193] env[63024]: DEBUG nova.network.neutron [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1596.477913] env[63024]: DEBUG nova.network.neutron [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1596.479802] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15ab0763-395e-4a46-9bc1-81a1b66a45d7 tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lock "6e477ec2-9270-42b1-85bd-a315460d9cab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.240s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1596.507073] env[63024]: DEBUG nova.compute.utils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1596.508798] env[63024]: DEBUG nova.compute.manager [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Not allocating networking since 'none' was specified. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1596.666892] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b588a1-042d-a906-2d9f-c6f51b052159, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1596.729751] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "1ad97ed0-2a84-4783-8511-e0f6b24861bd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1596.730197] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "1ad97ed0-2a84-4783-8511-e0f6b24861bd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1596.989166] env[63024]: DEBUG oslo_concurrency.lockutils [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Releasing lock "refresh_cache-51532b8e-4adf-4cc7-b91e-885d7934a7e8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1596.989166] env[63024]: DEBUG nova.compute.manager [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1596.989166] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1596.989166] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8206d55e-d2c0-428e-a420-3ba5893ac280 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.994313] env[63024]: DEBUG nova.compute.manager [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1597.003537] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1597.003537] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de9884e6-4ea5-4f36-920e-cabbae786535 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.009334] env[63024]: DEBUG nova.compute.manager [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1597.015344] env[63024]: DEBUG oslo_vmware.api [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Waiting for the task: (returnval){ [ 1597.015344] env[63024]: value = "task-1950442" [ 1597.015344] env[63024]: _type = "Task" [ 1597.015344] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.031530] env[63024]: DEBUG oslo_vmware.api [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950442, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.132760] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "610dd030-5080-498a-8744-b1411297d70d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.132957] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "610dd030-5080-498a-8744-b1411297d70d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1597.172337] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b588a1-042d-a906-2d9f-c6f51b052159, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.339152] env[63024]: DEBUG nova.network.neutron [req-b8ff5cdc-62bf-487e-91d5-08f8c090f33e req-fc3f784c-80b3-4d6b-bad1-356ca92c1333 service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Updated VIF entry in instance network info cache for port d962584b-9fa7-4c73-b446-b432b537aafd. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1597.340177] env[63024]: DEBUG nova.network.neutron [req-b8ff5cdc-62bf-487e-91d5-08f8c090f33e req-fc3f784c-80b3-4d6b-bad1-356ca92c1333 service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Updating instance_info_cache with network_info: [{"id": "d962584b-9fa7-4c73-b446-b432b537aafd", "address": "fa:16:3e:7a:de:28", "network": {"id": "7c2acd24-557d-4fb0-bb17-ae985019dd54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1770461600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9860b12ec09944ddacb54f69a18d4c4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd962584b-9f", "ovs_interfaceid": "d962584b-9fa7-4c73-b446-b432b537aafd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.525184] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1597.530603] env[63024]: DEBUG oslo_vmware.api [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950442, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.603682] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a6c33c-1008-44cd-a2d0-51c1a98863ee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.612359] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a4ae6a-7e8d-467f-ab14-0e7754231d36 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.644479] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864b5052-9713-421b-8190-811afac5b826 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.653266] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eb64474-99e9-4f75-bbb2-3614fd131bee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.670484] env[63024]: DEBUG nova.compute.provider_tree [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1597.675209] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b588a1-042d-a906-2d9f-c6f51b052159, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.843034] env[63024]: DEBUG oslo_concurrency.lockutils [req-b8ff5cdc-62bf-487e-91d5-08f8c090f33e req-fc3f784c-80b3-4d6b-bad1-356ca92c1333 service nova] Releasing lock "refresh_cache-e03b8577-9298-4e88-98ea-6258e97db28d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1598.026983] env[63024]: DEBUG nova.compute.manager [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1598.041854] env[63024]: DEBUG oslo_vmware.api [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950442, 'name': PowerOffVM_Task, 'duration_secs': 0.934965} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.041854] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1598.041854] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1598.041854] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8d7d46c1-1c3e-423e-b6d4-8d9800990843 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.066295] env[63024]: DEBUG nova.virt.hardware [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1598.066830] env[63024]: DEBUG nova.virt.hardware [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1598.067170] env[63024]: DEBUG nova.virt.hardware [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1598.070291] env[63024]: DEBUG nova.virt.hardware [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1598.070291] env[63024]: DEBUG nova.virt.hardware [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1598.070291] env[63024]: DEBUG nova.virt.hardware [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1598.070291] env[63024]: DEBUG nova.virt.hardware [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1598.070291] env[63024]: DEBUG nova.virt.hardware [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1598.070490] env[63024]: DEBUG nova.virt.hardware [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1598.070490] env[63024]: DEBUG nova.virt.hardware [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1598.070490] env[63024]: DEBUG nova.virt.hardware [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1598.070490] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0a2696-8674-49f6-9227-b3e67ec8735e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.074581] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1598.074782] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1598.074957] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Deleting the datastore file [datastore1] 51532b8e-4adf-4cc7-b91e-885d7934a7e8 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1598.075608] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34f025c2-80b0-4317-bbb6-82317b815185 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.080971] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6eabeca-82c2-4586-b1b2-f3df6602a8ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.086544] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Acquiring lock "de31255d-b82f-4f32-82b2-0a8368fe2510" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.086794] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Lock "de31255d-b82f-4f32-82b2-0a8368fe2510" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.086998] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Acquiring lock "de31255d-b82f-4f32-82b2-0a8368fe2510-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.087196] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Lock "de31255d-b82f-4f32-82b2-0a8368fe2510-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.087362] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Lock "de31255d-b82f-4f32-82b2-0a8368fe2510-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.088986] env[63024]: DEBUG oslo_vmware.api [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Waiting for the task: (returnval){ [ 1598.088986] env[63024]: value = "task-1950444" [ 1598.088986] env[63024]: _type = "Task" [ 1598.088986] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.093235] env[63024]: INFO nova.compute.manager [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Terminating instance [ 1598.109026] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Instance VIF info [] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1598.110977] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Creating folder: Project (deae6108cc9a42cc839a8b0363934542). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1598.116023] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c9e5687-7a83-4799-8541-4944d191f96c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.118218] env[63024]: DEBUG oslo_vmware.api [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950444, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.126598] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Created folder: Project (deae6108cc9a42cc839a8b0363934542) in parent group-v401959. [ 1598.127035] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Creating folder: Instances. Parent ref: group-v402006. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1598.127813] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-13d4bb4e-f57c-4ed6-833e-6d998c9b3628 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.137377] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Created folder: Instances in parent group-v402006. [ 1598.137633] env[63024]: DEBUG oslo.service.loopingcall [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1598.137820] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1598.138089] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a348b1c-7e93-41ff-ad7b-edb62c1ad8a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.156193] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1598.156193] env[63024]: value = "task-1950447" [ 1598.156193] env[63024]: _type = "Task" [ 1598.156193] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.168861] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950447, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.172679] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b588a1-042d-a906-2d9f-c6f51b052159, 'name': SearchDatastore_Task, 'duration_secs': 3.153275} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.173088] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1598.173281] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd/04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1598.173566] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1e9a80a-87eb-4993-8472-8619a93e11df {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.176852] env[63024]: DEBUG nova.scheduler.client.report [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1598.193804] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Waiting for the task: (returnval){ [ 1598.193804] env[63024]: value = "task-1950448" [ 1598.193804] env[63024]: _type = "Task" [ 1598.193804] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.273610] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1598.273774] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1598.273912] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleting the datastore file [datastore1] b0b4d94c-cd5c-4452-baa6-9aeec46b43ad {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1598.274317] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-abf03e55-df0d-4ae8-995c-8251cad3da04 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.283282] env[63024]: DEBUG oslo_vmware.api [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1598.283282] env[63024]: value = "task-1950449" [ 1598.283282] env[63024]: _type = "Task" [ 1598.283282] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.295771] env[63024]: DEBUG oslo_vmware.api [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950449, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.605116] env[63024]: DEBUG oslo_vmware.api [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Task: {'id': task-1950444, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159586} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.605447] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1598.605640] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1598.606085] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1598.606195] env[63024]: INFO nova.compute.manager [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Took 1.62 seconds to destroy the instance on the hypervisor. [ 1598.606786] env[63024]: DEBUG oslo.service.loopingcall [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1598.607050] env[63024]: DEBUG nova.compute.manager [-] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1598.607596] env[63024]: DEBUG nova.network.neutron [-] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1598.617049] env[63024]: DEBUG nova.compute.manager [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1598.617600] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1598.618627] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9a75da-7382-48d2-b49a-2c0272901f79 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.628574] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1598.629129] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6ccfc04-aaed-4766-adb6-75f5d570e903 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.636388] env[63024]: DEBUG oslo_vmware.api [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Waiting for the task: (returnval){ [ 1598.636388] env[63024]: value = "task-1950450" [ 1598.636388] env[63024]: _type = "Task" [ 1598.636388] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.641368] env[63024]: DEBUG nova.network.neutron [-] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1598.650701] env[63024]: DEBUG oslo_vmware.api [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950450, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.670032] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950447, 'name': CreateVM_Task, 'duration_secs': 0.498777} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.670032] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1598.670547] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1598.670722] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1598.671064] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1598.671331] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9747ed47-187e-4196-8848-5cd803146cdf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1598.678126] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1598.678126] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5200e61d-c120-74ab-659a-e1ccbd32ac88" [ 1598.678126] env[63024]: _type = "Task" [ 1598.678126] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1598.687987] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.682s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1598.689482] env[63024]: DEBUG nova.compute.manager [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1598.691474] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5200e61d-c120-74ab-659a-e1ccbd32ac88, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.692337] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.804s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.693802] env[63024]: INFO nova.compute.claims [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1598.709628] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950448, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.795674] env[63024]: DEBUG oslo_vmware.api [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950449, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.376294} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1598.796104] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1598.796400] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1598.796674] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1598.796941] env[63024]: INFO nova.compute.manager [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Took 3.95 seconds to destroy the instance on the hypervisor. [ 1598.797377] env[63024]: DEBUG oslo.service.loopingcall [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1598.797666] env[63024]: DEBUG nova.compute.manager [-] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1598.797790] env[63024]: DEBUG nova.network.neutron [-] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1598.810965] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cc2cb1e1-5258-4bcd-9b44-4cd5728f9c3a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquiring lock "interface-6e477ec2-9270-42b1-85bd-a315460d9cab-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1598.811309] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cc2cb1e1-5258-4bcd-9b44-4cd5728f9c3a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lock "interface-6e477ec2-9270-42b1-85bd-a315460d9cab-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1598.811698] env[63024]: DEBUG nova.objects.instance [None req-cc2cb1e1-5258-4bcd-9b44-4cd5728f9c3a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lazy-loading 'flavor' on Instance uuid 6e477ec2-9270-42b1-85bd-a315460d9cab {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1599.145898] env[63024]: DEBUG oslo_vmware.api [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950450, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.147317] env[63024]: DEBUG nova.network.neutron [-] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.191718] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5200e61d-c120-74ab-659a-e1ccbd32ac88, 'name': SearchDatastore_Task, 'duration_secs': 0.072255} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.192439] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.192806] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1599.192917] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1599.193155] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1599.193430] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1599.195040] env[63024]: DEBUG nova.compute.utils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1599.196462] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-17ac0c91-a39e-44cc-a077-e2623fc14c9e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.199982] env[63024]: DEBUG nova.compute.manager [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1599.199982] env[63024]: DEBUG nova.network.neutron [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1599.217205] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950448, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.791896} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.221092] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd/04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1599.221491] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1599.222124] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1599.222749] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1599.224030] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4fc04cd4-3170-472c-b19e-7d3ba1cee5dd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.226903] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4433d87c-d0e4-46e9-ac9b-81effc4d6225 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.235150] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1599.235150] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52262232-8679-f446-2e8c-06ee3d7b7a77" [ 1599.235150] env[63024]: _type = "Task" [ 1599.235150] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.237078] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Waiting for the task: (returnval){ [ 1599.237078] env[63024]: value = "task-1950451" [ 1599.237078] env[63024]: _type = "Task" [ 1599.237078] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.251490] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52262232-8679-f446-2e8c-06ee3d7b7a77, 'name': SearchDatastore_Task, 'duration_secs': 0.014001} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.257598] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950451, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.258886] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3a3ba31-12e5-4bdb-9e99-5598981417f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.264565] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1599.264565] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ec80fa-6b72-6384-df76-4caebac31781" [ 1599.264565] env[63024]: _type = "Task" [ 1599.264565] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.275533] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ec80fa-6b72-6384-df76-4caebac31781, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.302168] env[63024]: DEBUG nova.policy [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a6b6f1b74f3448f9adde468b3e5c6777', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7079d038d284096b56aaf3a4f93d1d9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1599.315899] env[63024]: DEBUG nova.objects.instance [None req-cc2cb1e1-5258-4bcd-9b44-4cd5728f9c3a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lazy-loading 'pci_requests' on Instance uuid 6e477ec2-9270-42b1-85bd-a315460d9cab {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1599.647393] env[63024]: DEBUG oslo_vmware.api [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950450, 'name': PowerOffVM_Task, 'duration_secs': 0.588519} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.647716] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1599.648056] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1599.648372] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-681b93e0-6de7-4f29-bd3b-a819f94c1fd8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.650218] env[63024]: INFO nova.compute.manager [-] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Took 1.04 seconds to deallocate network for instance. [ 1599.710977] env[63024]: DEBUG nova.compute.manager [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1599.742963] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1599.743179] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1599.743758] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Deleting the datastore file [datastore1] de31255d-b82f-4f32-82b2-0a8368fe2510 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1599.747799] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d5f9cbdd-7816-4fb9-b67b-9b073ee55351 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.751847] env[63024]: DEBUG nova.network.neutron [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Successfully created port: bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1599.762067] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950451, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.150168} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.764116] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1599.764833] env[63024]: DEBUG oslo_vmware.api [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Waiting for the task: (returnval){ [ 1599.764833] env[63024]: value = "task-1950453" [ 1599.764833] env[63024]: _type = "Task" [ 1599.764833] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.765714] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5947fd-93f4-4f62-81db-ac56ae31bd9a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.783868] env[63024]: DEBUG oslo_vmware.api [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950453, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.808033] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd/04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1599.808563] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ec80fa-6b72-6384-df76-4caebac31781, 'name': SearchDatastore_Task, 'duration_secs': 0.018013} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1599.811781] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-390dfae4-cfdb-4ec5-938f-eaddf2bc178e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.827532] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1599.827936] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37/726d9639-1ab4-46a9-975e-5580c8344a37.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1599.829264] env[63024]: DEBUG nova.objects.base [None req-cc2cb1e1-5258-4bcd-9b44-4cd5728f9c3a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Object Instance<6e477ec2-9270-42b1-85bd-a315460d9cab> lazy-loaded attributes: flavor,pci_requests {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1599.829264] env[63024]: DEBUG nova.network.neutron [None req-cc2cb1e1-5258-4bcd-9b44-4cd5728f9c3a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] 
[instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1599.831759] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c2eccc20-307b-40d4-8895-3b31eda54e63 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1599.840261] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Waiting for the task: (returnval){ [ 1599.840261] env[63024]: value = "task-1950454" [ 1599.840261] env[63024]: _type = "Task" [ 1599.840261] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.841662] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1599.841662] env[63024]: value = "task-1950455" [ 1599.841662] env[63024]: _type = "Task" [ 1599.841662] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1599.857339] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.860319] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950455, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1599.866824] env[63024]: DEBUG nova.network.neutron [-] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1600.041169] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cc2cb1e1-5258-4bcd-9b44-4cd5728f9c3a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lock "interface-6e477ec2-9270-42b1-85bd-a315460d9cab-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.229s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1600.156972] env[63024]: DEBUG oslo_concurrency.lockutils [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.201375] env[63024]: DEBUG nova.compute.manager [req-90695b33-f407-43ee-9d79-e4c5d065919c req-2e2b5abc-7dcf-48e2-bad1-605c9bfb00f1 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Received event network-changed-0b9a5894-831a-4645-8fee-497016b36839 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1600.201687] env[63024]: DEBUG nova.compute.manager [req-90695b33-f407-43ee-9d79-e4c5d065919c req-2e2b5abc-7dcf-48e2-bad1-605c9bfb00f1 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Refreshing instance network info cache due to event network-changed-0b9a5894-831a-4645-8fee-497016b36839. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1600.201901] env[63024]: DEBUG oslo_concurrency.lockutils [req-90695b33-f407-43ee-9d79-e4c5d065919c req-2e2b5abc-7dcf-48e2-bad1-605c9bfb00f1 service nova] Acquiring lock "refresh_cache-7146277f-2621-4e8f-a14c-49bf4dd052db" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1600.203764] env[63024]: DEBUG oslo_concurrency.lockutils [req-90695b33-f407-43ee-9d79-e4c5d065919c req-2e2b5abc-7dcf-48e2-bad1-605c9bfb00f1 service nova] Acquired lock "refresh_cache-7146277f-2621-4e8f-a14c-49bf4dd052db" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1600.203764] env[63024]: DEBUG nova.network.neutron [req-90695b33-f407-43ee-9d79-e4c5d065919c req-2e2b5abc-7dcf-48e2-bad1-605c9bfb00f1 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Refreshing network info cache for port 0b9a5894-831a-4645-8fee-497016b36839 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1600.290191] env[63024]: DEBUG oslo_vmware.api [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Task: {'id': task-1950453, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.445938} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1600.290818] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1600.291336] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1600.292820] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1600.293098] env[63024]: INFO nova.compute.manager [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1600.293354] env[63024]: DEBUG oslo.service.loopingcall [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1600.293857] env[63024]: DEBUG nova.compute.manager [-] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1600.293969] env[63024]: DEBUG nova.network.neutron [-] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1600.305079] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33189e0-0c79-4398-bfab-35bda660e9ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.313062] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e468033-a942-479c-9883-ee0eb810ff66 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.368429] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb1fa04-b2a2-494d-b87a-d869af48fd5c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.373764] env[63024]: INFO nova.compute.manager [-] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Took 1.58 seconds to deallocate network for instance. 
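[annotation] The task entries above (task-1950450 through task-1950453: PowerOffVM_Task, ExtendVirtualDisk_Task, DeleteDatastoreFile_Task) show the oslo.vmware polling pattern: each vCenter operation returns a task reference that Nova polls on an interval, logging "progress is N%" until the task reports completion or error. The following is a minimal, self-contained sketch of that loop only; FakeTask is a hypothetical stand-in for a real vCenter task object and this is not the oslo.vmware implementation itself.

```python
# Sketch of the wait_for_task/_poll_task pattern seen in the log above.
# FakeTask simulates a vCenter task that advances toward completion.
import itertools
import time


class FakeTask:
    """Hypothetical task reporting progress like DeleteDatastoreFile_Task."""

    def __init__(self, name, steps):
        self.name = name
        self._progress = itertools.chain(
            range(0, 101, 100 // steps), itertools.repeat(100))

    def info(self):
        progress = next(self._progress)
        state = "success" if progress >= 100 else "running"
        return {"name": self.name, "progress": progress, "state": state}


def wait_for_task(task, poll_interval=0.5):
    """Poll `task` until it reports success or error, logging progress."""
    start = time.monotonic()
    while True:
        info = task.info()
        if info["state"] == "success":
            print(f"Task {info['name']} completed successfully "
                  f"in {time.monotonic() - start:.3f}s")
            return info
        if info["state"] == "error":
            raise RuntimeError(f"Task {info['name']} failed")
        print(f"Task {info['name']} progress is {info['progress']}%")
        time.sleep(poll_interval)


wait_for_task(FakeTask("DeleteDatastoreFile_Task", steps=4), poll_interval=0.1)
```
[end annotation]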
[ 1600.380487] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950455, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.393030] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950454, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1600.393030] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dec7c91-15d4-4a8e-929e-0256f4d8e782 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.412586] env[63024]: DEBUG nova.compute.provider_tree [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1600.492219] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Acquiring lock "8a826350-0fee-409d-a3fc-260d7d43bdf6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1600.492479] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Lock "8a826350-0fee-409d-a3fc-260d7d43bdf6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1600.730948] env[63024]: DEBUG nova.compute.manager [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1600.759220] env[63024]: DEBUG nova.virt.hardware [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1600.759473] env[63024]: DEBUG nova.virt.hardware [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1600.759811] env[63024]: DEBUG nova.virt.hardware [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1600.760151] env[63024]: DEBUG nova.virt.hardware [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1600.760477] env[63024]: DEBUG nova.virt.hardware [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1600.760852] env[63024]: DEBUG nova.virt.hardware [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1600.761207] env[63024]: DEBUG nova.virt.hardware [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1600.761491] env[63024]: DEBUG nova.virt.hardware [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1600.761790] env[63024]: DEBUG 
nova.virt.hardware [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1600.762158] env[63024]: DEBUG nova.virt.hardware [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1600.762447] env[63024]: DEBUG nova.virt.hardware [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1600.763562] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5e2916-f155-42be-9ff0-981f752600a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1600.773387] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b089fab4-c224-4ca6-82e2-89d19bb9658a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.516502] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1601.540136] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950455, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.797257} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.541017] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950454, 'name': ReconfigVM_Task, 'duration_secs': 0.73326} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1601.541017] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37/726d9639-1ab4-46a9-975e-5580c8344a37.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1601.541279] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1601.541473] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd/04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1601.542687] env[63024]: ERROR nova.scheduler.client.report [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [req-3bafb2a2-d774-4659-987d-df7b2437f5ad] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-3bafb2a2-d774-4659-987d-df7b2437f5ad"}]} [ 1601.542986] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-405f38bf-900f-42f8-9d61-64994d4c989b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.544895] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bbf8f939-5f19-49a6-a880-b073dd00abb5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1601.554609] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Waiting for the task: (returnval){ [ 1601.554609] env[63024]: value = "task-1950457" [ 1601.554609] env[63024]: _type = "Task" [ 1601.554609] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.554890] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1601.554890] env[63024]: value = "task-1950456" [ 1601.554890] env[63024]: _type = "Task" [ 1601.554890] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1601.567364] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950457, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.568438] env[63024]: DEBUG nova.scheduler.client.report [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1601.572992] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950456, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1601.588160] env[63024]: DEBUG nova.scheduler.client.report [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1601.588337] env[63024]: DEBUG nova.compute.provider_tree [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1601.609145] env[63024]: DEBUG nova.scheduler.client.report [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Refreshing aggregate associations for resource provider 
89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1601.630989] env[63024]: DEBUG nova.scheduler.client.report [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1602.032479] env[63024]: DEBUG nova.network.neutron [-] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.068738] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950456, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073966} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.071521] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1602.071841] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950457, 'name': Rename_Task, 'duration_secs': 0.301506} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1602.072532] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875d7336-9199-452e-ac57-6af211149ec5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.074966] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1602.077754] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5293ba6-db82-4d6d-a472-e1bea35a3e36 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.101482] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37/726d9639-1ab4-46a9-975e-5580c8344a37.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1602.105292] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0cbe20f5-d9b1-495e-8ca5-9b20fb74ce5d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.121611] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Waiting for the task: (returnval){ [ 1602.121611] env[63024]: value = "task-1950458" [ 1602.121611] env[63024]: _type = "Task" [ 1602.121611] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.127880] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1602.127880] env[63024]: value = "task-1950459" [ 1602.127880] env[63024]: _type = "Task" [ 1602.127880] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1602.135136] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950458, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.143085] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950459, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.169532] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe76d43c-dc51-485e-b274-b140b804fd7a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.173069] env[63024]: DEBUG nova.network.neutron [req-90695b33-f407-43ee-9d79-e4c5d065919c req-2e2b5abc-7dcf-48e2-bad1-605c9bfb00f1 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Updated VIF entry in instance network info cache for port 0b9a5894-831a-4645-8fee-497016b36839. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1602.173494] env[63024]: DEBUG nova.network.neutron [req-90695b33-f407-43ee-9d79-e4c5d065919c req-2e2b5abc-7dcf-48e2-bad1-605c9bfb00f1 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Updating instance_info_cache with network_info: [{"id": "0b9a5894-831a-4645-8fee-497016b36839", "address": "fa:16:3e:16:86:10", "network": {"id": "8df09e32-2cef-4759-a80d-a935973669ba", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-205905598-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.160", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18ce2b5b54b54a71af0b71ba887dd2a4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "11b669be-fb26-4ef8-bdb6-c77ab9d06daf", "external-id": "nsx-vlan-transportzone-633", "segmentation_id": 633, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0b9a5894-83", "ovs_interfaceid": "0b9a5894-831a-4645-8fee-497016b36839", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1602.179838] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a1a44b-54df-4b8f-b45d-d05883d48ed9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.217794] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98f45331-cdd2-419f-95f6-2011f37f1cd2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.226726] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32e8ed8-ce78-4942-acff-ec5c631f3617 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1602.242394] env[63024]: DEBUG nova.compute.provider_tree [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1602.244272] env[63024]: DEBUG nova.network.neutron [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Successfully updated port: bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1602.535718] env[63024]: INFO nova.compute.manager [-] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Took 2.24 seconds to deallocate network for instance. [ 1602.642879] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950458, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.646424] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950459, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1602.676653] env[63024]: DEBUG oslo_concurrency.lockutils [req-90695b33-f407-43ee-9d79-e4c5d065919c req-2e2b5abc-7dcf-48e2-bad1-605c9bfb00f1 service nova] Releasing lock "refresh_cache-7146277f-2621-4e8f-a14c-49bf4dd052db" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1602.753209] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Acquiring lock "refresh_cache-17e1dfa2-b104-4aac-928e-6364da155c3d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1602.753473] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Acquired lock "refresh_cache-17e1dfa2-b104-4aac-928e-6364da155c3d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1602.753666] env[63024]: DEBUG nova.network.neutron [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1602.786918] env[63024]: DEBUG nova.scheduler.client.report [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 41 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1602.787213] env[63024]: DEBUG nova.compute.provider_tree [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 41 to 42 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1602.787401] env[63024]: DEBUG nova.compute.provider_tree [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1603.050373] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.148841] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950458, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.149339] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950459, 'name': ReconfigVM_Task, 'duration_secs': 0.64845} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.149764] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37/726d9639-1ab4-46a9-975e-5580c8344a37.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1603.150551] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1eb6e975-0bdb-4721-910a-0c3ba2404f0f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.160172] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1603.160172] env[63024]: value = "task-1950460" [ 1603.160172] env[63024]: _type = "Task" [ 1603.160172] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.166495] env[63024]: DEBUG nova.compute.manager [req-d715c349-375c-4aa1-82e9-b6964f9c2e45 req-26cd1aab-553e-42b9-a335-73a554d22426 service nova] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Received event network-vif-deleted-07391e4a-67fc-4ff7-8491-8ca4469c68e9 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1603.167203] env[63024]: DEBUG nova.compute.manager [req-d715c349-375c-4aa1-82e9-b6964f9c2e45 req-26cd1aab-553e-42b9-a335-73a554d22426 service nova] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Received event network-vif-deleted-459dce7c-f846-4532-bf5c-5ae83c31b50a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1603.167203] env[63024]: DEBUG nova.compute.manager [req-d715c349-375c-4aa1-82e9-b6964f9c2e45 req-26cd1aab-553e-42b9-a335-73a554d22426 service nova] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Received event network-vif-plugged-bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1603.167588] env[63024]: DEBUG oslo_concurrency.lockutils [req-d715c349-375c-4aa1-82e9-b6964f9c2e45 req-26cd1aab-553e-42b9-a335-73a554d22426 service nova] Acquiring lock "17e1dfa2-b104-4aac-928e-6364da155c3d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.169587] env[63024]: DEBUG oslo_concurrency.lockutils [req-d715c349-375c-4aa1-82e9-b6964f9c2e45 req-26cd1aab-553e-42b9-a335-73a554d22426 service nova] Lock "17e1dfa2-b104-4aac-928e-6364da155c3d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.169587] env[63024]: DEBUG oslo_concurrency.lockutils [req-d715c349-375c-4aa1-82e9-b6964f9c2e45 req-26cd1aab-553e-42b9-a335-73a554d22426 service nova] Lock "17e1dfa2-b104-4aac-928e-6364da155c3d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.169587] env[63024]: DEBUG nova.compute.manager [req-d715c349-375c-4aa1-82e9-b6964f9c2e45 req-26cd1aab-553e-42b9-a335-73a554d22426 service nova] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] No waiting events found dispatching network-vif-plugged-bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1603.169587] env[63024]: WARNING nova.compute.manager [req-d715c349-375c-4aa1-82e9-b6964f9c2e45 req-26cd1aab-553e-42b9-a335-73a554d22426 service nova] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Received unexpected event network-vif-plugged-bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8 for instance with vm_state building and task_state spawning. [ 1603.175973] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950460, 'name': Rename_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.293192] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.601s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.293777] env[63024]: DEBUG nova.compute.manager [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1603.296732] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.360s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.301229] env[63024]: INFO nova.compute.claims [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1603.306300] env[63024]: DEBUG nova.network.neutron [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1603.384359] env[63024]: DEBUG oslo_concurrency.lockutils [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquiring lock "6e477ec2-9270-42b1-85bd-a315460d9cab" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.384942] env[63024]: DEBUG oslo_concurrency.lockutils [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lock "6e477ec2-9270-42b1-85bd-a315460d9cab" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.384942] env[63024]: DEBUG oslo_concurrency.lockutils [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquiring lock "6e477ec2-9270-42b1-85bd-a315460d9cab-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1603.385208] env[63024]: DEBUG oslo_concurrency.lockutils [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lock "6e477ec2-9270-42b1-85bd-a315460d9cab-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1603.385253] env[63024]: DEBUG oslo_concurrency.lockutils [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lock "6e477ec2-9270-42b1-85bd-a315460d9cab-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1603.390521] env[63024]: INFO nova.compute.manager [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Terminating instance [ 1603.634253] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950458, 'name': PowerOnVM_Task} progress is 82%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.672416] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950460, 'name': Rename_Task, 'duration_secs': 0.197061} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1603.672904] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1603.673163] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b581f762-603f-4e95-9be1-f831101bdfd5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.681589] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1603.681589] env[63024]: value = "task-1950461" [ 1603.681589] env[63024]: _type = "Task" [ 1603.681589] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.681589] env[63024]: DEBUG nova.network.neutron [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Updating instance_info_cache with network_info: [{"id": "bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8", "address": "fa:16:3e:4d:8d:95", "network": {"id": "661f5b82-5445-4228-ad4b-751d343d891e", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-444193488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7079d038d284096b56aaf3a4f93d1d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd3ccdb3-ed", "ovs_interfaceid": "bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1603.696992] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950461, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.807860] env[63024]: DEBUG nova.compute.utils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1603.810776] env[63024]: DEBUG nova.compute.manager [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1603.810973] env[63024]: DEBUG nova.network.neutron [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1603.896553] env[63024]: DEBUG nova.compute.manager [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1603.896780] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1603.897761] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba9d2b34-e377-41d6-bd8a-3491300350a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.906793] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1603.907062] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-899bca0a-e995-45b4-93f5-b5791bdd398f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1603.917437] env[63024]: DEBUG oslo_vmware.api [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Waiting for the task: (returnval){ [ 1603.917437] env[63024]: value = "task-1950462" [ 1603.917437] env[63024]: _type = "Task" [ 1603.917437] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1603.933029] env[63024]: DEBUG oslo_vmware.api [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950462, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1603.940175] env[63024]: DEBUG nova.policy [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '67d3796d3d5d4eebba5ecf8e611190c9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '751ed00ef16a4cca832e3c78731c9379', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1604.134936] env[63024]: DEBUG oslo_vmware.api [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950458, 'name': PowerOnVM_Task, 'duration_secs': 1.738528} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.135241] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1604.135441] env[63024]: INFO nova.compute.manager [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Took 16.64 seconds to spawn the instance on the hypervisor. 
[ 1604.135679] env[63024]: DEBUG nova.compute.manager [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1604.136410] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13b9544c-81f9-4be1-9746-cef7e8f4dfde {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.199542] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Releasing lock "refresh_cache-17e1dfa2-b104-4aac-928e-6364da155c3d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1604.200063] env[63024]: DEBUG nova.compute.manager [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Instance network_info: |[{"id": "bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8", "address": "fa:16:3e:4d:8d:95", "network": {"id": "661f5b82-5445-4228-ad4b-751d343d891e", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-444193488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7079d038d284096b56aaf3a4f93d1d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd3ccdb3-ed", "ovs_interfaceid": "bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1604.200313] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950461, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.201523] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4d:8d:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '274afb4c-04df-4213-8ad2-8f48a10d78a8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1604.210702] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Creating folder: Project (a7079d038d284096b56aaf3a4f93d1d9). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1604.211240] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6484d89b-11bb-48be-a401-04629a830870 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.223728] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Created folder: Project (a7079d038d284096b56aaf3a4f93d1d9) in parent group-v401959. [ 1604.223867] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Creating folder: Instances. Parent ref: group-v402009. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1604.224118] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d2acf2ba-4913-4e62-9372-f29276919d8e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.238029] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Created folder: Instances in parent group-v402009. [ 1604.238211] env[63024]: DEBUG oslo.service.loopingcall [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1604.238366] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1604.238568] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5f64f5b-89e0-417a-a68e-3e019689fb29 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.262675] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1604.262675] env[63024]: value = "task-1950465" [ 1604.262675] env[63024]: _type = "Task" [ 1604.262675] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.272652] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950465, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.315690] env[63024]: DEBUG nova.compute.manager [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1604.433868] env[63024]: DEBUG oslo_vmware.api [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950462, 'name': PowerOffVM_Task, 'duration_secs': 0.215459} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.434364] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1604.434524] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1604.434776] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2d9c008-88a9-4fc5-84a7-e5846a95da04 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.504091] env[63024]: DEBUG nova.network.neutron [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Successfully created port: 630d0aef-3424-48b2-90be-fca999b2ed17 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1604.507332] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1604.507332] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1604.507332] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Deleting the datastore file [datastore1] 6e477ec2-9270-42b1-85bd-a315460d9cab {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1604.507332] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-921a56d5-bfab-4eed-9f89-1104ad5fbc1e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.516959] env[63024]: DEBUG oslo_vmware.api [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Waiting for the task: (returnval){ [ 1604.516959] env[63024]: value = "task-1950467" [ 1604.516959] env[63024]: _type = "Task" [ 1604.516959] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1604.533031] env[63024]: DEBUG oslo_vmware.api [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950467, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.664595] env[63024]: INFO nova.compute.manager [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Took 33.14 seconds to build instance. [ 1604.695522] env[63024]: DEBUG oslo_vmware.api [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950461, 'name': PowerOnVM_Task, 'duration_secs': 0.75014} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1604.699864] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1604.700157] env[63024]: INFO nova.compute.manager [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Took 6.67 seconds to spawn the instance on the hypervisor. [ 1604.700363] env[63024]: DEBUG nova.compute.manager [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1604.705869] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7ae703-447d-4420-a159-c8d808c392cf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.773592] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950465, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1604.982840] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa81e17-be3e-4825-952d-a78732b83512 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1604.991177] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2951ca1b-1732-4dea-8f2a-04fd2e260515 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.029073] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f03016-0abe-4680-9eef-e53e8fceb8e0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.036453] env[63024]: DEBUG oslo_vmware.api [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Task: {'id': task-1950467, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.367726} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1605.039028] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1605.039028] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1605.039028] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1605.039304] env[63024]: INFO nova.compute.manager [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1605.039399] env[63024]: DEBUG oslo.service.loopingcall [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1605.039661] env[63024]: DEBUG nova.compute.manager [-] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1605.039763] env[63024]: DEBUG nova.network.neutron [-] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1605.045023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26da4fbc-7aee-4a3a-8e93-3c7688326af9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.062068] env[63024]: DEBUG nova.compute.provider_tree [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1605.166666] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8685664a-71cd-4732-8fff-61879c2b09b6 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Lock "04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.657s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.226814] env[63024]: INFO nova.compute.manager [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Took 25.18 seconds to build instance. [ 1605.277284] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950465, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1605.330481] env[63024]: DEBUG nova.compute.manager [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1605.356120] env[63024]: DEBUG nova.virt.hardware [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1605.356120] env[63024]: DEBUG nova.virt.hardware [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1605.356120] env[63024]: DEBUG nova.virt.hardware [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1605.356288] env[63024]: DEBUG nova.virt.hardware [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1605.356288] env[63024]: DEBUG nova.virt.hardware [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1605.356288] env[63024]: DEBUG nova.virt.hardware [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1605.356288] env[63024]: DEBUG nova.virt.hardware [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1605.356288] env[63024]: DEBUG nova.virt.hardware [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1605.356439] env[63024]: DEBUG nova.virt.hardware [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1605.356699] env[63024]: DEBUG nova.virt.hardware [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1605.357110] env[63024]: DEBUG nova.virt.hardware [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1605.358098] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95662653-4132-41b5-82f3-efb7d2163cff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.367939] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fdd153a-65f2-4c0e-9c54-590fb943be51 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1605.569019] env[63024]: DEBUG nova.scheduler.client.report [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1605.669627] env[63024]: DEBUG nova.compute.manager [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1605.729853] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5861cf58-f078-441e-8bb3-16fe77911ee3 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lock "726d9639-1ab4-46a9-975e-5580c8344a37" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.812s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1605.774604] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950465, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.071295] env[63024]: DEBUG nova.compute.manager [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Received event network-changed-bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1606.071513] env[63024]: DEBUG nova.compute.manager [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Refreshing instance network info cache due to event network-changed-bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1606.071791] env[63024]: DEBUG oslo_concurrency.lockutils [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] Acquiring lock "refresh_cache-17e1dfa2-b104-4aac-928e-6364da155c3d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.071884] env[63024]: DEBUG oslo_concurrency.lockutils [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] Acquired lock "refresh_cache-17e1dfa2-b104-4aac-928e-6364da155c3d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.071990] env[63024]: DEBUG nova.network.neutron [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Refreshing network info cache for port bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1606.074142] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.778s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1606.078399] env[63024]: DEBUG nova.compute.manager [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1606.078399] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.917s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.079886] env[63024]: INFO nova.compute.claims [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1606.083671] env[63024]: DEBUG nova.network.neutron [-] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1606.197846] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.233953] env[63024]: DEBUG nova.compute.manager [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1606.273806] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950465, 'name': CreateVM_Task, 'duration_secs': 1.591584} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.274032] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1606.274737] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.274916] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.275265] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1606.275542] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b6ae94f-6c00-4829-b0bf-7edb08a784f4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.280929] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Waiting for the task: (returnval){ [ 1606.280929] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e55ac2-e6b1-dcad-102a-85d28c640fce" [ 1606.280929] env[63024]: _type = "Task" [ 1606.280929] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.291514] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e55ac2-e6b1-dcad-102a-85d28c640fce, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.589541] env[63024]: DEBUG nova.compute.utils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1606.592987] env[63024]: INFO nova.compute.manager [-] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Took 1.55 seconds to deallocate network for instance. 
[ 1606.593780] env[63024]: DEBUG nova.compute.manager [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1606.594180] env[63024]: DEBUG nova.network.neutron [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1606.673984] env[63024]: DEBUG oslo_concurrency.lockutils [None req-63097460-d1d2-46a2-812a-bf56f160dfc1 tempest-ServersListShow296Test-2106682744 tempest-ServersListShow296Test-2106682744-project-member] Acquiring lock "b420b8c1-7d95-4f84-8396-8e500c9a787a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.674268] env[63024]: DEBUG oslo_concurrency.lockutils [None req-63097460-d1d2-46a2-812a-bf56f160dfc1 tempest-ServersListShow296Test-2106682744 tempest-ServersListShow296Test-2106682744-project-member] Lock "b420b8c1-7d95-4f84-8396-8e500c9a787a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1606.746813] env[63024]: DEBUG nova.policy [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ef4d30001db4c8e9e54b343f2d5a323', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3bb863257aeb4fd68e2a10b72750f0ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1606.768718] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1606.797083] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e55ac2-e6b1-dcad-102a-85d28c640fce, 'name': SearchDatastore_Task, 'duration_secs': 0.016007} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1606.797421] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1606.797906] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1606.798539] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1606.798721] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1606.800130] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1606.800130] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c6da9e6-828d-45a2-ac2d-4ace0e2e8dea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.822364] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1606.822562] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1606.825339] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4970e148-2ab2-47eb-8134-2b8b357d3c5b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.836309] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Waiting for the task: (returnval){ [ 1606.836309] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b2b98f-7b2c-7843-f343-d7e2a61d8b19" [ 1606.836309] env[63024]: _type = "Task" [ 1606.836309] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1606.844436] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b2b98f-7b2c-7843-f343-d7e2a61d8b19, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1606.897143] env[63024]: DEBUG nova.network.neutron [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Successfully updated port: 630d0aef-3424-48b2-90be-fca999b2ed17 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1606.972808] env[63024]: INFO nova.compute.manager [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Rebuilding instance [ 1607.029609] env[63024]: DEBUG nova.compute.manager [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1607.030950] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38beec45-0736-4bd3-9296-aa3d15714698 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.099549] env[63024]: DEBUG nova.compute.manager [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1607.103719] env[63024]: DEBUG oslo_concurrency.lockutils [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1607.232316] env[63024]: DEBUG nova.network.neutron [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Updated VIF entry in instance network info cache for port bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1607.232728] env[63024]: DEBUG nova.network.neutron [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Updating instance_info_cache with network_info: [{"id": "bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8", "address": "fa:16:3e:4d:8d:95", "network": {"id": "661f5b82-5445-4228-ad4b-751d343d891e", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-444193488-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7079d038d284096b56aaf3a4f93d1d9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "274afb4c-04df-4213-8ad2-8f48a10d78a8", "external-id": "nsx-vlan-transportzone-515", "segmentation_id": 515, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd3ccdb3-ed", "ovs_interfaceid": "bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1607.352470] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b2b98f-7b2c-7843-f343-d7e2a61d8b19, 'name': SearchDatastore_Task, 'duration_secs': 0.01384} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.353353] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35aeeb19-d3b1-4620-8d10-f3df7b26dfea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.358900] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Waiting for the task: (returnval){ [ 1607.358900] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52acca9f-8923-67c2-4e2b-886192c7f92a" [ 1607.358900] env[63024]: _type = "Task" [ 1607.358900] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.370087] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52acca9f-8923-67c2-4e2b-886192c7f92a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.401865] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.401865] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquired lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.401981] env[63024]: DEBUG nova.network.neutron [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1607.409683] env[63024]: DEBUG oslo_vmware.rw_handles [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52059687-d3e9-60dc-0274-d7847d0f2d1b/disk-0.vmdk. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1607.411206] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390a2db5-0876-4913-ab73-1c0862333348 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.419928] env[63024]: DEBUG oslo_vmware.rw_handles [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52059687-d3e9-60dc-0274-d7847d0f2d1b/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1607.420122] env[63024]: ERROR oslo_vmware.rw_handles [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52059687-d3e9-60dc-0274-d7847d0f2d1b/disk-0.vmdk due to incomplete transfer. [ 1607.420357] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-528eb7d9-5d55-4dc3-a27a-982b71deb1bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.428754] env[63024]: DEBUG oslo_vmware.rw_handles [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52059687-d3e9-60dc-0274-d7847d0f2d1b/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1607.428754] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Uploaded image e81db031-aecd-4d46-b472-c5cb3d8c71b0 to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1607.434050] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1607.434050] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4bd796c4-79c9-4928-8f4c-974d2a373c71 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.437519] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1607.437519] env[63024]: value = "task-1950468" [ 1607.437519] env[63024]: _type = "Task" [ 1607.437519] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.449434] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950468, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.610931] env[63024]: DEBUG nova.network.neutron [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Successfully created port: a3574d89-a818-4dbd-bf07-78ac14b00783 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1607.631171] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7065e57a-856e-40e6-8ff9-8fffaa22697b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.640736] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f569e6d5-75c4-476a-8b53-5e125186cfb0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.675019] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4103b78-0920-4e90-bd9c-db71368eaec0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.683831] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75448b6-4931-49cd-af2d-5681315262ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.701289] env[63024]: DEBUG nova.compute.provider_tree [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1607.736033] env[63024]: DEBUG oslo_concurrency.lockutils [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] Releasing lock "refresh_cache-17e1dfa2-b104-4aac-928e-6364da155c3d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.736344] env[63024]: DEBUG nova.compute.manager [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Received event network-changed-d962584b-9fa7-4c73-b446-b432b537aafd {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1607.736786] env[63024]: DEBUG nova.compute.manager [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Refreshing instance network info cache due to event network-changed-d962584b-9fa7-4c73-b446-b432b537aafd. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1607.736786] env[63024]: DEBUG oslo_concurrency.lockutils [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] Acquiring lock "refresh_cache-e03b8577-9298-4e88-98ea-6258e97db28d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1607.736992] env[63024]: DEBUG oslo_concurrency.lockutils [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] Acquired lock "refresh_cache-e03b8577-9298-4e88-98ea-6258e97db28d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1607.736992] env[63024]: DEBUG nova.network.neutron [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Refreshing network info cache for port d962584b-9fa7-4c73-b446-b432b537aafd {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1607.876242] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52acca9f-8923-67c2-4e2b-886192c7f92a, 'name': SearchDatastore_Task, 'duration_secs': 0.025048} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1607.876598] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1607.876866] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 17e1dfa2-b104-4aac-928e-6364da155c3d/17e1dfa2-b104-4aac-928e-6364da155c3d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1607.877492] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36e7dd56-f7d1-409d-89cc-6203083ed83f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1607.885087] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Waiting for the task: (returnval){ [ 1607.885087] env[63024]: value = "task-1950469" [ 1607.885087] env[63024]: _type = "Task" [ 1607.885087] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1607.893195] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950469, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.947929] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950468, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1607.962161] env[63024]: DEBUG nova.network.neutron [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1608.059568] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1608.059568] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-91e2d4b0-3bb2-45b7-8d97-fb57ce1c7e73 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.067105] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1608.067105] env[63024]: value = "task-1950470" [ 1608.067105] env[63024]: _type = "Task" [ 1608.067105] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.074330] env[63024]: DEBUG nova.compute.manager [req-3f47e54b-0f24-4548-b397-a1c4363c954c req-1c2d0b85-7218-4cf4-9c1a-67187cc2edb6 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Received event network-changed-879d1c91-c785-4da7-852e-abd159810127 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1608.075210] env[63024]: DEBUG nova.compute.manager [req-3f47e54b-0f24-4548-b397-a1c4363c954c req-1c2d0b85-7218-4cf4-9c1a-67187cc2edb6 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Refreshing instance network info cache due to event network-changed-879d1c91-c785-4da7-852e-abd159810127. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1608.075581] env[63024]: DEBUG oslo_concurrency.lockutils [req-3f47e54b-0f24-4548-b397-a1c4363c954c req-1c2d0b85-7218-4cf4-9c1a-67187cc2edb6 service nova] Acquiring lock "refresh_cache-04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.075962] env[63024]: DEBUG oslo_concurrency.lockutils [req-3f47e54b-0f24-4548-b397-a1c4363c954c req-1c2d0b85-7218-4cf4-9c1a-67187cc2edb6 service nova] Acquired lock "refresh_cache-04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.076257] env[63024]: DEBUG nova.network.neutron [req-3f47e54b-0f24-4548-b397-a1c4363c954c req-1c2d0b85-7218-4cf4-9c1a-67187cc2edb6 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Refreshing network info cache for port 879d1c91-c785-4da7-852e-abd159810127 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1608.089482] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950470, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.121964] env[63024]: DEBUG nova.compute.manager [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1608.149018] env[63024]: DEBUG nova.virt.hardware [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1608.149018] env[63024]: DEBUG nova.virt.hardware [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1608.149018] env[63024]: DEBUG nova.virt.hardware [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
1608.149253] env[63024]: DEBUG nova.virt.hardware [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1608.149253] env[63024]: DEBUG nova.virt.hardware [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1608.149253] env[63024]: DEBUG nova.virt.hardware [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1608.149253] env[63024]: DEBUG nova.virt.hardware [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1608.149758] env[63024]: DEBUG nova.virt.hardware [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1608.150093] env[63024]: DEBUG nova.virt.hardware [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1608.150405] env[63024]: DEBUG nova.virt.hardware [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1608.150720] env[63024]: DEBUG nova.virt.hardware [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1608.151863] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6519561d-18f0-4c8e-9341-1fd690f91524 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.161321] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0c4a018-1c7f-45fc-a3e3-f61c03268cff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.205533] env[63024]: DEBUG nova.network.neutron [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: 
e2138192-14e0-43d2-9d19-9820747d7217] Updating instance_info_cache with network_info: [{"id": "630d0aef-3424-48b2-90be-fca999b2ed17", "address": "fa:16:3e:9f:98:e3", "network": {"id": "dab57617-8c96-4c9c-a117-05fd2262c951", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1124018667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751ed00ef16a4cca832e3c78731c9379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap630d0aef-34", "ovs_interfaceid": "630d0aef-3424-48b2-90be-fca999b2ed17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.205713] env[63024]: DEBUG nova.scheduler.client.report [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1608.395311] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950469, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.449694] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950468, 'name': Destroy_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.499232] env[63024]: DEBUG nova.network.neutron [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Updated VIF entry in instance network info cache for port d962584b-9fa7-4c73-b446-b432b537aafd. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1608.499701] env[63024]: DEBUG nova.network.neutron [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Updating instance_info_cache with network_info: [{"id": "d962584b-9fa7-4c73-b446-b432b537aafd", "address": "fa:16:3e:7a:de:28", "network": {"id": "7c2acd24-557d-4fb0-bb17-ae985019dd54", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-1770461600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9860b12ec09944ddacb54f69a18d4c4c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7bcd9d2d-25c8-41ad-9a4a-93b9029ba993", "external-id": "nsx-vlan-transportzone-734", "segmentation_id": 734, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd962584b-9f", "ovs_interfaceid": "d962584b-9fa7-4c73-b446-b432b537aafd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1608.581104] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950470, 'name': PowerOffVM_Task, 'duration_secs': 0.22154} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.581439] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1608.581730] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1608.582888] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6c1e35-c2ae-4d0b-8599-11daf411b025 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.592583] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1608.592855] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-386d809d-e834-4f17-a29e-c86b09871f3d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.619948] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1608.620183] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1608.620367] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Deleting the datastore file [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1608.620703] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4ed403bc-67ef-48a3-81fe-a1ee0217ea92 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.629990] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1608.629990] env[63024]: value = "task-1950472" [ 1608.629990] env[63024]: _type = "Task" [ 1608.629990] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.642932] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950472, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.666240] env[63024]: DEBUG nova.compute.manager [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Received event network-vif-deleted-de190f04-f1aa-479a-b49d-4cf36ac4475f {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1608.666539] env[63024]: DEBUG nova.compute.manager [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Received event network-vif-plugged-630d0aef-3424-48b2-90be-fca999b2ed17 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1608.666742] env[63024]: DEBUG oslo_concurrency.lockutils [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] Acquiring lock "e2138192-14e0-43d2-9d19-9820747d7217-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1608.666956] env[63024]: DEBUG oslo_concurrency.lockutils [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] Lock "e2138192-14e0-43d2-9d19-9820747d7217-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.667156] env[63024]: DEBUG oslo_concurrency.lockutils [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] Lock "e2138192-14e0-43d2-9d19-9820747d7217-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.667355] env[63024]: DEBUG nova.compute.manager [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] No waiting events found dispatching network-vif-plugged-630d0aef-3424-48b2-90be-fca999b2ed17 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1608.667576] env[63024]: WARNING nova.compute.manager [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Received unexpected event network-vif-plugged-630d0aef-3424-48b2-90be-fca999b2ed17 for instance with vm_state building and task_state spawning. 
[ 1608.667725] env[63024]: DEBUG nova.compute.manager [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Received event network-changed-630d0aef-3424-48b2-90be-fca999b2ed17 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1608.667880] env[63024]: DEBUG nova.compute.manager [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Refreshing instance network info cache due to event network-changed-630d0aef-3424-48b2-90be-fca999b2ed17. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1608.668057] env[63024]: DEBUG oslo_concurrency.lockutils [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] Acquiring lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1608.709927] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Releasing lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1608.710386] env[63024]: DEBUG nova.compute.manager [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Instance network_info: |[{"id": "630d0aef-3424-48b2-90be-fca999b2ed17", "address": "fa:16:3e:9f:98:e3", "network": {"id": "dab57617-8c96-4c9c-a117-05fd2262c951", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1124018667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751ed00ef16a4cca832e3c78731c9379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap630d0aef-34", "ovs_interfaceid": "630d0aef-3424-48b2-90be-fca999b2ed17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1608.711266] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1608.711843] env[63024]: DEBUG nova.compute.manager [None 
req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1608.714595] env[63024]: DEBUG oslo_concurrency.lockutils [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] Acquired lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1608.714792] env[63024]: DEBUG nova.network.neutron [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Refreshing network info cache for port 630d0aef-3424-48b2-90be-fca999b2ed17 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1608.715976] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9f:98:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '13d625c9-77ec-4edb-a56b-9f37a314cc39', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '630d0aef-3424-48b2-90be-fca999b2ed17', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1608.723633] env[63024]: DEBUG oslo.service.loopingcall [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1608.723818] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 21.584s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1608.726869] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1608.727116] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2fd9cb52-2e07-4f26-a8d9-ee884a6638c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.748984] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1608.748984] env[63024]: value = "task-1950473" [ 1608.748984] env[63024]: _type = "Task" [ 1608.748984] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.757960] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950473, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.896318] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950469, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.945382} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.896584] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 17e1dfa2-b104-4aac-928e-6364da155c3d/17e1dfa2-b104-4aac-928e-6364da155c3d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1608.896789] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1608.897039] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bb089ec-c5b5-4350-8768-8af2da908ecc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.904298] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Waiting for the task: (returnval){ [ 1608.904298] env[63024]: value = "task-1950474" [ 1608.904298] env[63024]: _type = "Task" [ 1608.904298] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.912578] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950474, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1608.951729] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950468, 'name': Destroy_Task, 'duration_secs': 1.410605} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1608.952936] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Destroyed the VM [ 1608.952936] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1608.952936] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-908d2863-83ae-477e-822d-2bb7298e7719 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1608.958541] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1608.958541] env[63024]: value = "task-1950475" [ 1608.958541] env[63024]: _type = "Task" [ 1608.958541] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1608.966296] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950475, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.003124] env[63024]: DEBUG oslo_concurrency.lockutils [req-01dfbaff-1440-4dfc-b61b-5cd216dfd9df req-4f7f2c9d-d70a-4370-99d6-024ee462e689 service nova] Releasing lock "refresh_cache-e03b8577-9298-4e88-98ea-6258e97db28d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.016846] env[63024]: DEBUG nova.network.neutron [req-3f47e54b-0f24-4548-b397-a1c4363c954c req-1c2d0b85-7218-4cf4-9c1a-67187cc2edb6 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Updated VIF entry in instance network info cache for port 879d1c91-c785-4da7-852e-abd159810127. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1609.017180] env[63024]: DEBUG nova.network.neutron [req-3f47e54b-0f24-4548-b397-a1c4363c954c req-1c2d0b85-7218-4cf4-9c1a-67187cc2edb6 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Updating instance_info_cache with network_info: [{"id": "879d1c91-c785-4da7-852e-abd159810127", "address": "fa:16:3e:d0:a4:cd", "network": {"id": "534b669b-9055-4679-81c0-841e6bd2ca48", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-851747787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.227", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "851e1e0d455e4f67ba4bfc3e87eca7f7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap879d1c91-c7", "ovs_interfaceid": "879d1c91-c785-4da7-852e-abd159810127", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1609.059126] env[63024]: DEBUG oslo_concurrency.lockutils [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Acquiring lock "e03b8577-9298-4e88-98ea-6258e97db28d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.059126] env[63024]: DEBUG oslo_concurrency.lockutils [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Lock "e03b8577-9298-4e88-98ea-6258e97db28d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.059126] env[63024]: DEBUG oslo_concurrency.lockutils [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Acquiring lock "e03b8577-9298-4e88-98ea-6258e97db28d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1609.059666] env[63024]: DEBUG oslo_concurrency.lockutils [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Lock "e03b8577-9298-4e88-98ea-6258e97db28d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1609.059666] env[63024]: DEBUG oslo_concurrency.lockutils [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Lock "e03b8577-9298-4e88-98ea-6258e97db28d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1609.061965] env[63024]: INFO nova.compute.manager [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Terminating instance [ 1609.142257] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950472, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.307546} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.142523] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1609.142730] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1609.142869] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1609.235024] env[63024]: DEBUG nova.compute.utils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1609.235024] env[63024]: DEBUG nova.compute.manager [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1609.235024] env[63024]: DEBUG nova.network.neutron [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1609.253240] env[63024]: INFO nova.compute.claims [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1609.272813] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950473, 'name': CreateVM_Task, 'duration_secs': 0.375866} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.272813] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1609.273496] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.273686] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.275095] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1609.275390] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed2c5f6d-3ef8-470a-9f77-dbc093ecdb20 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.283084] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1609.283084] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524bb8e6-12e8-6313-2f7b-284f6e02e6aa" [ 1609.283084] env[63024]: _type = "Task" [ 1609.283084] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.296318] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524bb8e6-12e8-6313-2f7b-284f6e02e6aa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.389556] env[63024]: DEBUG nova.policy [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28fab1e92c1d4491986100983f6b4ab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6072e8931d9540ad8fe4a2b4b1ec782d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1609.420855] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950474, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.157684} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.421146] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1609.421935] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e067875-bee6-4b57-8f06-67c21658958c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.446771] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] 17e1dfa2-b104-4aac-928e-6364da155c3d/17e1dfa2-b104-4aac-928e-6364da155c3d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1609.447433] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57c2ea5b-a9b4-4fa4-b1ba-75ec385daa94 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.474636] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950475, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.475891] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Waiting for the task: (returnval){ [ 1609.475891] env[63024]: value = "task-1950476" [ 1609.475891] env[63024]: _type = "Task" [ 1609.475891] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.484484] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950476, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.520168] env[63024]: DEBUG oslo_concurrency.lockutils [req-3f47e54b-0f24-4548-b397-a1c4363c954c req-1c2d0b85-7218-4cf4-9c1a-67187cc2edb6 service nova] Releasing lock "refresh_cache-04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.566327] env[63024]: DEBUG nova.compute.manager [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1609.566592] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1609.567848] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a4bffa-4d70-44c9-bc44-be1bf3372301 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.576615] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1609.577198] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c3eb0b7-1c0e-4b21-aa8d-c42d35daff6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.585301] env[63024]: DEBUG oslo_vmware.api [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Waiting for the task: (returnval){ [ 1609.585301] env[63024]: value = "task-1950477" [ 1609.585301] env[63024]: _type = "Task" [ 1609.585301] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.593372] env[63024]: DEBUG oslo_vmware.api [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950477, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.738696] env[63024]: DEBUG nova.compute.manager [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1609.768338] env[63024]: INFO nova.compute.resource_tracker [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating resource usage from migration 85a4ec44-899c-4937-b93d-0eaabd8ff03f [ 1609.801467] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524bb8e6-12e8-6313-2f7b-284f6e02e6aa, 'name': SearchDatastore_Task, 'duration_secs': 0.029146} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1609.808669] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1609.808905] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1609.809876] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1609.809876] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1609.809876] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1609.810723] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-99d3b518-68f0-4805-a237-af14e9809063 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.821526] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1609.822139] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1609.822735] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb972fbb-9c6c-46f3-9e0a-f38119e280bc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1609.834443] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1609.834443] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52350e9f-c95a-287f-9de2-b0b0480584f6" [ 1609.834443] env[63024]: _type = "Task" [ 1609.834443] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1609.844608] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52350e9f-c95a-287f-9de2-b0b0480584f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.979408] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950475, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1609.990079] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950476, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.109220] env[63024]: DEBUG oslo_vmware.api [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950477, 'name': PowerOffVM_Task, 'duration_secs': 0.190968} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.110207] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1610.110207] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1610.110207] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0d05143f-5c6d-449b-b082-50b3b5b2ccb7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.161886] env[63024]: DEBUG nova.network.neutron [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Updated VIF entry in instance network info cache for port 630d0aef-3424-48b2-90be-fca999b2ed17. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1610.161886] env[63024]: DEBUG nova.network.neutron [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Updating instance_info_cache with network_info: [{"id": "630d0aef-3424-48b2-90be-fca999b2ed17", "address": "fa:16:3e:9f:98:e3", "network": {"id": "dab57617-8c96-4c9c-a117-05fd2262c951", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1124018667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751ed00ef16a4cca832e3c78731c9379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap630d0aef-34", "ovs_interfaceid": "630d0aef-3424-48b2-90be-fca999b2ed17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1610.216959] env[63024]: DEBUG nova.virt.hardware [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1610.216959] env[63024]: DEBUG nova.virt.hardware [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1610.216959] env[63024]: DEBUG nova.virt.hardware [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1610.216959] env[63024]: DEBUG nova.virt.hardware [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1610.217537] env[63024]: DEBUG nova.virt.hardware [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1610.217587] env[63024]: DEBUG nova.virt.hardware [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1610.218864] env[63024]: DEBUG nova.virt.hardware [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1610.218864] env[63024]: DEBUG nova.virt.hardware [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1610.218864] env[63024]: DEBUG nova.virt.hardware [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1610.218864] env[63024]: DEBUG nova.virt.hardware [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1610.218864] env[63024]: DEBUG nova.virt.hardware [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1610.220146] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97dfcfb9-f088-4694-8920-4f175dbb1655 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.234010] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f2882c5-ccd4-4343-9fbe-01c23a8ec304 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.253455] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Instance VIF info [] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1610.260140] env[63024]: DEBUG oslo.service.loopingcall [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1610.265100] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1610.265100] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2edfbf3b-a620-4258-8e7a-2336d5d0bfc0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.282988] env[63024]: DEBUG nova.network.neutron [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Successfully updated port: a3574d89-a818-4dbd-bf07-78ac14b00783 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1610.286933] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1610.286933] env[63024]: value = "task-1950479" [ 1610.286933] env[63024]: _type = "Task" [ 1610.286933] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.294736] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950479, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.352429] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52350e9f-c95a-287f-9de2-b0b0480584f6, 'name': SearchDatastore_Task, 'duration_secs': 0.052027} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.353403] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ef061fc0-3659-43a1-944d-ff14a460153b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.362156] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1610.362156] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dcfa9b-78e1-bb85-91ac-06140a6e587b" [ 1610.362156] env[63024]: _type = "Task" [ 1610.362156] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.369552] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dcfa9b-78e1-bb85-91ac-06140a6e587b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.385459] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd982bc0-fdc8-4fa3-b7ab-9d9236f74d68 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.393120] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82b98b56-d063-4053-a450-2fdcdceb9700 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.423125] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62060fb5-a302-4c7a-80ec-5e268b35b6a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.430052] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7126f21c-6786-42bb-8c88-1ecd59051d13 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.443364] env[63024]: DEBUG nova.compute.provider_tree [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1610.477838] env[63024]: DEBUG oslo_vmware.api [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950475, 'name': RemoveSnapshot_Task, 'duration_secs': 1.323321} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.477838] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1610.477838] env[63024]: INFO nova.compute.manager [None req-4d31c815-cef6-4d7c-ba79-0fc82737621e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Took 23.77 seconds to snapshot the instance on the hypervisor. [ 1610.488708] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950476, 'name': ReconfigVM_Task, 'duration_secs': 0.817649} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.489107] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Reconfigured VM instance instance-00000010 to attach disk [datastore1] 17e1dfa2-b104-4aac-928e-6364da155c3d/17e1dfa2-b104-4aac-928e-6364da155c3d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1610.490012] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a60a1137-c932-4f58-8e30-59fec75fc825 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.499025] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Waiting for the task: (returnval){ [ 1610.499025] env[63024]: value = "task-1950480" [ 1610.499025] env[63024]: _type = "Task" [ 1610.499025] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.508273] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950480, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.665036] env[63024]: DEBUG oslo_concurrency.lockutils [req-e0832c18-e6b6-4ab8-ab51-3cd7c940a094 req-61ffcf0a-f15f-4508-a5c2-a4bfc04866fc service nova] Releasing lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.743337] env[63024]: DEBUG nova.network.neutron [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Successfully created port: 6e0e9732-b318-4b20-ad72-8c2bc07eaf34 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1610.763305] env[63024]: DEBUG nova.compute.manager [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1610.786253] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "refresh_cache-b765b8b3-a099-4e23-be30-d1178ecffc37" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1610.787265] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquired lock "refresh_cache-b765b8b3-a099-4e23-be30-d1178ecffc37" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.787265] env[63024]: DEBUG nova.network.neutron [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1610.807388] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950479, 'name': CreateVM_Task, 'duration_secs': 0.298974} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.807388] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1610.807388] env[63024]: DEBUG oslo_concurrency.lockutils [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1610.807508] env[63024]: DEBUG oslo_concurrency.lockutils [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1610.807753] env[63024]: DEBUG oslo_concurrency.lockutils [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1610.810022] env[63024]: DEBUG nova.virt.hardware [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1610.810224] env[63024]: DEBUG nova.virt.hardware [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1610.810381] env[63024]: DEBUG nova.virt.hardware [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1610.810559] env[63024]: DEBUG nova.virt.hardware [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1610.810721] env[63024]: DEBUG nova.virt.hardware [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1610.810860] env[63024]: DEBUG nova.virt.hardware [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1610.811072] env[63024]: DEBUG nova.virt.hardware [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1610.811232] env[63024]: DEBUG nova.virt.hardware [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1610.811393] env[63024]: DEBUG nova.virt.hardware [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1610.811548] env[63024]: DEBUG nova.virt.hardware [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1610.811742] env[63024]: DEBUG nova.virt.hardware [None 
req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1610.812017] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e26e681f-783e-4b8a-be3a-b6b46d8db935 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.815530] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd08acc-fa02-4cd6-ae21-ed76a78ed883 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.820928] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1610.820928] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52908760-ef4f-f65a-d399-a2b29134432e" [ 1610.820928] env[63024]: _type = "Task" [ 1610.820928] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.828722] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5ad4691-c884-44ad-b939-428d1bad9021 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.839914] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52908760-ef4f-f65a-d399-a2b29134432e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.872228] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dcfa9b-78e1-bb85-91ac-06140a6e587b, 'name': SearchDatastore_Task, 'duration_secs': 0.016452} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1610.872557] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1610.872968] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e2138192-14e0-43d2-9d19-9820747d7217/e2138192-14e0-43d2-9d19-9820747d7217.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1610.873399] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8202b99a-3276-4cd2-803c-f3c52b866276 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1610.884868] env[63024]: DEBUG nova.network.neutron [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1610.891175] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1610.891175] env[63024]: value = "task-1950481" [ 1610.891175] env[63024]: _type = "Task" [ 1610.891175] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1610.898783] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950481, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1610.947629] env[63024]: DEBUG nova.scheduler.client.report [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1611.017027] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950480, 'name': Rename_Task, 'duration_secs': 0.401675} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.017301] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1611.017906] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa3315f7-fb7a-4301-a389-58b2858c19c9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.026019] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Waiting for the task: (returnval){ [ 1611.026019] env[63024]: value = "task-1950482" [ 1611.026019] env[63024]: _type = "Task" [ 1611.026019] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.035389] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950482, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.223812] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1611.223812] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1611.223980] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Deleting the datastore file [datastore1] e03b8577-9298-4e88-98ea-6258e97db28d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1611.225491] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f1bdca8-eccd-4873-a5cd-90b40ce17417 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.230970] env[63024]: DEBUG oslo_vmware.api [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Waiting for the task: (returnval){ [ 1611.230970] env[63024]: value = "task-1950483" [ 1611.230970] env[63024]: _type = "Task" [ 1611.230970] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.242048] env[63024]: DEBUG oslo_vmware.api [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950483, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.309852] env[63024]: DEBUG nova.network.neutron [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Updating instance_info_cache with network_info: [{"id": "a3574d89-a818-4dbd-bf07-78ac14b00783", "address": "fa:16:3e:de:7c:88", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3574d89-a8", "ovs_interfaceid": "a3574d89-a818-4dbd-bf07-78ac14b00783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1611.342942] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52908760-ef4f-f65a-d399-a2b29134432e, 'name': SearchDatastore_Task, 'duration_secs': 0.018866} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.347901] env[63024]: DEBUG oslo_concurrency.lockutils [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.348374] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1611.348870] env[63024]: DEBUG oslo_concurrency.lockutils [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.349142] env[63024]: DEBUG oslo_concurrency.lockutils [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.349438] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1611.351311] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1e1d2f54-ab07-4965-85aa-695e2fe272e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.367896] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1611.368167] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1611.369481] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f580d3ff-861b-4fc7-950c-00cc6f2179a8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.376696] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1611.376696] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a6f993-f9c7-a1fb-59c5-29029217d105" [ 1611.376696] env[63024]: _type = "Task" [ 1611.376696] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.385154] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a6f993-f9c7-a1fb-59c5-29029217d105, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.404805] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950481, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.453427] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.729s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.453692] env[63024]: INFO nova.compute.manager [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Migrating [ 1611.453948] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.454186] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "compute-rpcapi-router" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.455853] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.075s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.458125] env[63024]: INFO 
nova.compute.claims [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1611.461771] env[63024]: INFO nova.compute.rpcapi [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Automatically selected compute RPC version 6.4 from minimum service version 68 [ 1611.462415] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "compute-rpcapi-router" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.522813] env[63024]: DEBUG nova.compute.manager [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Received event network-vif-plugged-a3574d89-a818-4dbd-bf07-78ac14b00783 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1611.523146] env[63024]: DEBUG oslo_concurrency.lockutils [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] Acquiring lock "b765b8b3-a099-4e23-be30-d1178ecffc37-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1611.523437] env[63024]: DEBUG oslo_concurrency.lockutils [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] Lock "b765b8b3-a099-4e23-be30-d1178ecffc37-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1611.523616] env[63024]: DEBUG oslo_concurrency.lockutils [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] Lock "b765b8b3-a099-4e23-be30-d1178ecffc37-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1611.523791] env[63024]: DEBUG nova.compute.manager [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] No waiting events found dispatching network-vif-plugged-a3574d89-a818-4dbd-bf07-78ac14b00783 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1611.524065] env[63024]: WARNING nova.compute.manager [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Received unexpected event network-vif-plugged-a3574d89-a818-4dbd-bf07-78ac14b00783 for instance with vm_state building and task_state spawning. 
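The "Acquiring lock" / "acquired" / "released" lines above, with their ":: waited" and ":: held" timings, come from oslo.concurrency's lockutils helpers guarding shared state such as the instance network info cache and the compute_resources tracker. The snippet below is a minimal sketch of that pattern, assuming the standard lockutils.synchronized decorator and lockutils.lock context manager; the lock names and function bodies are illustrative examples, not code copied from Nova.

# Illustrative sketch of the locking pattern recorded in the log above.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Body runs while the in-process lock is held; lockutils emits the
    # "acquired by ... waited" / "released ... held" lines seen above.
    return "claimed resources for %s" % instance_uuid


def refresh_network_cache(instance_uuid):
    # The context-manager form matches the explicit Acquiring/Releasing
    # pairs around the "refresh_cache-<uuid>" lock.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        pass  # rebuild the instance_info_cache here


if __name__ == "__main__":
    print(claim_resources("b765b8b3-a099-4e23-be30-d1178ecffc37"))
    refresh_network_cache("b765b8b3-a099-4e23-be30-d1178ecffc37")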
[ 1611.524291] env[63024]: DEBUG nova.compute.manager [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Received event network-changed-a3574d89-a818-4dbd-bf07-78ac14b00783 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1611.524461] env[63024]: DEBUG nova.compute.manager [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Refreshing instance network info cache due to event network-changed-a3574d89-a818-4dbd-bf07-78ac14b00783. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1611.524635] env[63024]: DEBUG oslo_concurrency.lockutils [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] Acquiring lock "refresh_cache-b765b8b3-a099-4e23-be30-d1178ecffc37" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.543474] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950482, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.749679] env[63024]: DEBUG oslo_vmware.api [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Task: {'id': task-1950483, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.469091} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.749679] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1611.749679] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1611.749679] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1611.749679] env[63024]: INFO nova.compute.manager [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Took 2.18 seconds to destroy the instance on the hypervisor. 
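Several of the teardown and spawn steps logged here (PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task, CopyVirtualDisk_Task) are vCenter tasks that the driver submits and then polls until they report success, which is what the recurring "progress is N%" and "completed successfully" entries record. Below is a minimal, self-contained sketch of that submit-and-poll control flow; the TaskInfo shape, the get_task_info callable, and the helper name are assumptions for illustration, not the oslo.vmware wait_for_task implementation. A bounded timeout is used so a stuck task cannot block the worker indefinitely.

# Hypothetical sketch of the polling loop behind the "progress is N%" lines.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    state: str          # "running", "success" or "error"
    progress: int = 0   # percent complete, as reported in the log
    error: str = ""


def wait_for_vcenter_task(get_task_info, poll_interval=0.5, timeout=300.0):
    # Poll a vCenter-style task until it succeeds, fails, or times out.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError("task failed: %s" % info.error)
        print("progress is %d%%" % info.progress)
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")


if __name__ == "__main__":
    # Fake task that completes after a few polls, standing in for a
    # PowerOffVM_Task or CreateVM_Task handle.
    states = iter([TaskInfo("running", 0), TaskInfo("running", 50),
                   TaskInfo("success", 100)])
    print(wait_for_vcenter_task(lambda: next(states), poll_interval=0.01))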
[ 1611.750260] env[63024]: DEBUG oslo.service.loopingcall [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1611.750468] env[63024]: DEBUG nova.compute.manager [-] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1611.750612] env[63024]: DEBUG nova.network.neutron [-] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1611.814500] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Releasing lock "refresh_cache-b765b8b3-a099-4e23-be30-d1178ecffc37" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1611.814925] env[63024]: DEBUG nova.compute.manager [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Instance network_info: |[{"id": "a3574d89-a818-4dbd-bf07-78ac14b00783", "address": "fa:16:3e:de:7c:88", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3574d89-a8", "ovs_interfaceid": "a3574d89-a818-4dbd-bf07-78ac14b00783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1611.815424] env[63024]: DEBUG oslo_concurrency.lockutils [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] Acquired lock "refresh_cache-b765b8b3-a099-4e23-be30-d1178ecffc37" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.815667] env[63024]: DEBUG nova.network.neutron [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Refreshing network info cache for port a3574d89-a818-4dbd-bf07-78ac14b00783 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1611.820654] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None 
req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:7c:88', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a3574d89-a818-4dbd-bf07-78ac14b00783', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1611.834233] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Creating folder: Project (3bb863257aeb4fd68e2a10b72750f0ef). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1611.839128] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3338706a-0596-4987-ba94-085ae7ced7b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.850479] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Created folder: Project (3bb863257aeb4fd68e2a10b72750f0ef) in parent group-v401959. [ 1611.850665] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Creating folder: Instances. Parent ref: group-v402014. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1611.850825] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-46f66c0a-ef19-448e-a438-a6a6c1854954 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.861697] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Created folder: Instances in parent group-v402014. [ 1611.861970] env[63024]: DEBUG oslo.service.loopingcall [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1611.862121] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1611.862343] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d732f281-65d2-46f3-ba83-6c7bfc41abb4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.899388] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1611.899388] env[63024]: value = "task-1950486" [ 1611.899388] env[63024]: _type = "Task" [ 1611.899388] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.904970] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a6f993-f9c7-a1fb-59c5-29029217d105, 'name': SearchDatastore_Task, 'duration_secs': 0.055025} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.908985] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950481, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.785351} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1611.909644] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6886d6d0-5d8a-4025-8c15-46190a6e375b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.913071] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e2138192-14e0-43d2-9d19-9820747d7217/e2138192-14e0-43d2-9d19-9820747d7217.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1611.913361] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1611.916755] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4275b085-77b7-41dd-87c0-ce8260db1cb9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1611.918662] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950486, 'name': CreateVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.925350] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1611.925350] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5231ca73-3315-3fe9-0464-32db15c7400a" [ 1611.925350] env[63024]: _type = "Task" [ 1611.925350] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.929958] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1611.929958] env[63024]: value = "task-1950487" [ 1611.929958] env[63024]: _type = "Task" [ 1611.929958] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1611.938571] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5231ca73-3315-3fe9-0464-32db15c7400a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.943295] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950487, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1611.982241] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1611.982469] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1611.982650] env[63024]: DEBUG nova.network.neutron [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1612.039619] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950482, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.411013] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950486, 'name': CreateVM_Task, 'duration_secs': 0.39832} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.411013] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1612.411585] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.411854] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.412505] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1612.412862] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ced5ef6-06ad-4aa3-bf24-9b978b04c5d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.418022] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1612.418022] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52db86e3-fb34-c790-f7a9-f7554224b3cc" [ 1612.418022] env[63024]: _type = "Task" [ 1612.418022] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.431019] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52db86e3-fb34-c790-f7a9-f7554224b3cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.438504] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950487, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090307} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.446019] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1612.446019] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5231ca73-3315-3fe9-0464-32db15c7400a, 'name': SearchDatastore_Task, 'duration_secs': 0.011356} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.446019] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f2c8c4-3845-421a-bea5-357da7ec4434 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.447404] env[63024]: DEBUG oslo_concurrency.lockutils [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.447806] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37/726d9639-1ab4-46a9-975e-5580c8344a37.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1612.449162] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4d243324-4b48-4063-b979-380ec72b7989 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.475576] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] e2138192-14e0-43d2-9d19-9820747d7217/e2138192-14e0-43d2-9d19-9820747d7217.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1612.480081] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-414c1088-988a-4f51-b63e-e4c469e154cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.497810] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1612.497810] env[63024]: value = "task-1950488" [ 1612.497810] 
env[63024]: _type = "Task" [ 1612.497810] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.509096] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1612.509096] env[63024]: value = "task-1950489" [ 1612.509096] env[63024]: _type = "Task" [ 1612.509096] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1612.513127] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950488, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.523749] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950489, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1612.540574] env[63024]: DEBUG oslo_vmware.api [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950482, 'name': PowerOnVM_Task, 'duration_secs': 1.35766} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.541867] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1612.542244] env[63024]: INFO nova.compute.manager [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Took 11.81 seconds to spawn the instance on the hypervisor. [ 1612.542522] env[63024]: DEBUG nova.compute.manager [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1612.543407] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38694aae-6453-4b9f-9d4e-d477e4c47896 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.815038] env[63024]: DEBUG nova.network.neutron [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Updated VIF entry in instance network info cache for port a3574d89-a818-4dbd-bf07-78ac14b00783. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1612.815038] env[63024]: DEBUG nova.network.neutron [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Updating instance_info_cache with network_info: [{"id": "a3574d89-a818-4dbd-bf07-78ac14b00783", "address": "fa:16:3e:de:7c:88", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.82", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa3574d89-a8", "ovs_interfaceid": "a3574d89-a818-4dbd-bf07-78ac14b00783", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1612.938252] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52db86e3-fb34-c790-f7a9-f7554224b3cc, 'name': SearchDatastore_Task, 'duration_secs': 0.009734} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1612.942916] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1612.942916] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1612.943113] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1612.943113] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1612.943652] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1612.943830] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6046703f-d8d7-47ac-b0a6-c7691b948afd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.979059] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1612.979832] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1612.980129] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40c78b3e-6977-40a8-8428-5af7b90e7b9f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1612.993869] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1612.993869] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ff7e23-8526-74e1-2215-da47e44e78c2" [ 1612.993869] env[63024]: _type = "Task" [ 1612.993869] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.013966] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ff7e23-8526-74e1-2215-da47e44e78c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.024135] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950488, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.030126] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950489, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.068778] env[63024]: INFO nova.compute.manager [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Took 31.76 seconds to build instance. 
[ 1613.072920] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7b564f9-0958-4ac4-b1cc-0cd19a2676d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.081143] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d3604b-7679-4143-bdb6-74efea9d3ff1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.114427] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-426ff1f5-4cba-416a-b823-da8a97823fdd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.123014] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a555e4d4-b0d3-4c99-80a9-f85c38d762a9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.139786] env[63024]: DEBUG nova.compute.provider_tree [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1613.196612] env[63024]: DEBUG nova.network.neutron [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance_info_cache with network_info: [{"id": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "address": "fa:16:3e:64:8e:6a", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90fdf5d2-f2", "ovs_interfaceid": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.286848] env[63024]: DEBUG nova.network.neutron [-] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1613.323717] env[63024]: DEBUG oslo_concurrency.lockutils [req-e8c5f34b-911e-4d34-bf17-c712589aa086 req-2588a69b-e8e9-43af-b245-76394a47d12f service nova] Releasing lock "refresh_cache-b765b8b3-a099-4e23-be30-d1178ecffc37" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.506673] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ff7e23-8526-74e1-2215-da47e44e78c2, 'name': SearchDatastore_Task, 'duration_secs': 0.026062} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.510483] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41b45513-7f1f-49db-bf1d-4b116d0ce296 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.521446] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1613.521446] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52939098-3459-3d81-8a79-7bf467fee397" [ 1613.521446] env[63024]: _type = "Task" [ 1613.521446] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.522466] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950488, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.528741] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950489, 'name': ReconfigVM_Task, 'duration_secs': 0.801053} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1613.532886] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Reconfigured VM instance instance-00000011 to attach disk [datastore1] e2138192-14e0-43d2-9d19-9820747d7217/e2138192-14e0-43d2-9d19-9820747d7217.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1613.532886] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1b07275-0d95-4e32-b2f6-83f5b44cc18d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1613.534789] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52939098-3459-3d81-8a79-7bf467fee397, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.547849] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1613.547849] env[63024]: value = "task-1950490" [ 1613.547849] env[63024]: _type = "Task" [ 1613.547849] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1613.557638] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950490, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1613.572494] env[63024]: DEBUG oslo_concurrency.lockutils [None req-980f018b-0d26-43ff-a211-07a6e7258865 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Lock "17e1dfa2-b104-4aac-928e-6364da155c3d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 42.093s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1613.643822] env[63024]: DEBUG nova.scheduler.client.report [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1613.699391] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1613.702579] env[63024]: DEBUG nova.network.neutron [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Successfully updated port: 6e0e9732-b318-4b20-ad72-8c2bc07eaf34 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1613.790270] env[63024]: INFO nova.compute.manager [-] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Took 2.04 seconds to deallocate network for instance.
[ 1613.909085] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "18444b47-476a-4ca3-9a4f-0dc58e652143" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1613.909364] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "18444b47-476a-4ca3-9a4f-0dc58e652143" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.019855] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950488, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.038128] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52939098-3459-3d81-8a79-7bf467fee397, 'name': SearchDatastore_Task, 'duration_secs': 0.05269} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.038128] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1614.038128] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b765b8b3-a099-4e23-be30-d1178ecffc37/b765b8b3-a099-4e23-be30-d1178ecffc37.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1614.038128] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d0fa01d3-919c-4e6a-8fe6-00615ea4cd4c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.042255] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1614.042255] env[63024]: value = "task-1950491" [ 1614.042255] env[63024]: _type = "Task" [ 1614.042255] env[63024]: } to complete.
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.056597] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950491, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.062113] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950490, 'name': Rename_Task, 'duration_secs': 0.414966} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.062468] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1614.062854] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d3759b8f-0185-4487-b47b-689edc256727 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.068866] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1614.068866] env[63024]: value = "task-1950492" [ 1614.068866] env[63024]: _type = "Task" [ 1614.068866] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.077263] env[63024]: DEBUG nova.compute.manager [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1614.080703] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950492, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.152072] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.693s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.152072] env[63024]: DEBUG nova.compute.manager [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1614.152678] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.238s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.154433] env[63024]: INFO nova.compute.claims [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1614.208811] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1614.208929] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1614.209549] env[63024]: DEBUG nova.network.neutron [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1614.299601] env[63024]: DEBUG oslo_concurrency.lockutils [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.339100] env[63024]: DEBUG nova.compute.manager [req-1535dd25-cb98-4cc4-afe0-b9e1af08b728 req-3de38b30-8d98-4459-8d8c-1006efbd37c7 service nova] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Received event network-vif-deleted-d962584b-9fa7-4c73-b446-b432b537aafd {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1614.339100] env[63024]: DEBUG nova.compute.manager [req-1535dd25-cb98-4cc4-afe0-b9e1af08b728 req-3de38b30-8d98-4459-8d8c-1006efbd37c7 service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Received event network-vif-plugged-6e0e9732-b318-4b20-ad72-8c2bc07eaf34 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1614.339100] env[63024]: DEBUG oslo_concurrency.lockutils [req-1535dd25-cb98-4cc4-afe0-b9e1af08b728 req-3de38b30-8d98-4459-8d8c-1006efbd37c7 service nova] Acquiring lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.339759] env[63024]: DEBUG
oslo_concurrency.lockutils [req-1535dd25-cb98-4cc4-afe0-b9e1af08b728 req-3de38b30-8d98-4459-8d8c-1006efbd37c7 service nova] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1614.340141] env[63024]: DEBUG oslo_concurrency.lockutils [req-1535dd25-cb98-4cc4-afe0-b9e1af08b728 req-3de38b30-8d98-4459-8d8c-1006efbd37c7 service nova] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1614.340537] env[63024]: DEBUG nova.compute.manager [req-1535dd25-cb98-4cc4-afe0-b9e1af08b728 req-3de38b30-8d98-4459-8d8c-1006efbd37c7 service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] No waiting events found dispatching network-vif-plugged-6e0e9732-b318-4b20-ad72-8c2bc07eaf34 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1614.340847] env[63024]: WARNING nova.compute.manager [req-1535dd25-cb98-4cc4-afe0-b9e1af08b728 req-3de38b30-8d98-4459-8d8c-1006efbd37c7 service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Received unexpected event network-vif-plugged-6e0e9732-b318-4b20-ad72-8c2bc07eaf34 for instance with vm_state building and task_state spawning. [ 1614.519088] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950488, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.594219} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1614.519282] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37/726d9639-1ab4-46a9-975e-5580c8344a37.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1614.519571] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1614.519837] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-48694af3-6b7d-48f7-a024-61b919109259 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1614.528156] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1614.528156] env[63024]: value = "task-1950493" [ 1614.528156] env[63024]: _type = "Task" [ 1614.528156] env[63024]: } to complete.
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1614.542538] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950493, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.557691] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950491, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.580071] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950492, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1614.605313] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1614.663431] env[63024]: DEBUG nova.compute.utils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1614.665321] env[63024]: DEBUG nova.compute.manager [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1614.665552] env[63024]: DEBUG nova.network.neutron [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1614.757236] env[63024]: DEBUG nova.policy [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8ef4d30001db4c8e9e54b343f2d5a323', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3bb863257aeb4fd68e2a10b72750f0ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1614.765658] env[63024]: DEBUG nova.network.neutron [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1615.040154] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950493, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.232814} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.040656] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1615.041627] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e3b5af-7ab1-4721-acdb-7c7c91c1cc54 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.062969] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37/726d9639-1ab4-46a9-975e-5580c8344a37.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1615.066422] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02a00c49-1275-44b3-922d-f3af6de6e530 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.082623] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950491, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.090069] env[63024]: DEBUG oslo_vmware.api [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950492, 'name': PowerOnVM_Task, 'duration_secs': 0.921058} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1615.090218] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1615.090314] env[63024]: INFO nova.compute.manager [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Took 9.76 seconds to spawn the instance on the hypervisor. 
[ 1615.090477] env[63024]: DEBUG nova.compute.manager [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1615.090893] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1615.090893] env[63024]: value = "task-1950494" [ 1615.090893] env[63024]: _type = "Task" [ 1615.090893] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.091481] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6201c18c-612b-4f47-9cfa-cf0ab0a683a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.104982] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950494, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.161750] env[63024]: DEBUG nova.network.neutron [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance_info_cache with network_info: [{"id": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "address": "fa:16:3e:2b:cc:65", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e0e9732-b3", "ovs_interfaceid": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1615.172853] env[63024]: DEBUG nova.compute.manager [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1615.221450] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5be0ec-293f-4fa9-9559-396ba7118166 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.245462] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance 'f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df' progress to 0 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1615.418195] env[63024]: DEBUG nova.network.neutron [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Successfully created port: 4a4fca95-5ff6-49cc-8848-f863d762cb8c {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1615.558052] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950491, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.614470] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950494, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.615146] env[63024]: INFO nova.compute.manager [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Took 33.78 seconds to build instance. 
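The update_instance_cache_with_nw_info record above stores the Neutron view of port 6e0e9732-b318-4b20-ad72-8c2bc07eaf34, and the "Instance VIF info" record a little further down shows the compact dict the vmwareapi layer derives from it before creating the VM. The following is a rough illustration of that mapping, restricted to fields visible in these log lines and abbreviated; it is not Nova's actual code.

# Abbreviated network_info entry, copied from the surrounding log records.
vif = {
    "id": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34",
    "address": "fa:16:3e:2b:cc:65",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f"},
    "vnic_type": "normal",
}

# The fields that reappear in the "Instance VIF info" record: the NSX logical
# switch is referenced as an OpaqueNetwork by its logical-switch id, and the
# vmxnet3 adapter model matches the image used in this run.
vif_info = {
    "network_name": vif["network"]["bridge"],
    "mac_address": vif["address"],
    "network_ref": {
        "type": "OpaqueNetwork",
        "network-id": vif["details"]["nsx-logical-switch-id"],
        "network-type": "nsx.LogicalSwitch",
        "use-external-id": True,
    },
    "iface_id": vif["id"],
    "vif_model": "vmxnet3",
}
print(vif_info)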
[ 1615.643813] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7e273d-2e48-401a-a922-624675e2c541 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.652909] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1594201d-d3b0-408e-8cef-5a691f5a2689 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.693018] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1615.693018] env[63024]: DEBUG nova.compute.manager [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Instance network_info: |[{"id": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "address": "fa:16:3e:2b:cc:65", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e0e9732-b3", "ovs_interfaceid": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1615.695442] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:cc:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3ccbdbb-8b49-4a26-913f-2a448b72280f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e0e9732-b318-4b20-ad72-8c2bc07eaf34', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1615.704113] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Creating folder: Project (6072e8931d9540ad8fe4a2b4b1ec782d). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1615.705131] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ef90f2-071f-4d47-a2bb-767b07007f7d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.708555] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cab851d9-a9a8-4658-b8a1-fc2b7c2c7186 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.718399] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2e4b5b-4a8d-40df-a443-056d4d8c113f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.724430] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Created folder: Project (6072e8931d9540ad8fe4a2b4b1ec782d) in parent group-v401959. [ 1615.724921] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Creating folder: Instances. Parent ref: group-v402017. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1615.726218] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dff27027-34bd-4f7f-bf92-04e1f191acc9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.736266] env[63024]: DEBUG nova.compute.provider_tree [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1615.746396] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Created folder: Instances in parent group-v402017. [ 1615.747102] env[63024]: DEBUG oslo.service.loopingcall [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1615.747317] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1615.747552] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eda15ecf-064a-4f96-b50e-28cbeeb3c38f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.764518] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1615.764693] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f61150e1-3cbb-423b-962d-09c18fe10ad1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1615.772620] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1615.772620] env[63024]: value = "task-1950497" [ 1615.772620] env[63024]: _type = "Task" [ 1615.772620] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.775017] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1615.775017] env[63024]: value = "task-1950498" [ 1615.775017] env[63024]: _type = "Task" [ 1615.775017] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1615.791995] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950497, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1615.794965] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950498, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.054024] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950491, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.539023} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.054280] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b765b8b3-a099-4e23-be30-d1178ecffc37/b765b8b3-a099-4e23-be30-d1178ecffc37.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1616.057034] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1616.057034] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d9d49c64-f140-4b20-9e45-459f945be864 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.061969] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1616.061969] env[63024]: value = "task-1950499" [ 1616.061969] env[63024]: _type = "Task" [ 1616.061969] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.070894] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950499, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.104884] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950494, 'name': ReconfigVM_Task, 'duration_secs': 0.671152} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.105262] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37/726d9639-1ab4-46a9-975e-5580c8344a37.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1616.106324] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16aeb7ac-6e01-4058-8cb7-500fb46d959e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.113366] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1616.113366] env[63024]: value = "task-1950500" [ 1616.113366] env[63024]: _type = "Task" [ 1616.113366] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.118145] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76eedfaa-5fc1-4434-964f-c2ca169f5164 tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "e2138192-14e0-43d2-9d19-9820747d7217" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.569s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.123722] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950500, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.211054] env[63024]: DEBUG nova.compute.manager [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1616.240794] env[63024]: DEBUG nova.scheduler.client.report [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1616.247549] env[63024]: DEBUG nova.virt.hardware [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1616.247845] env[63024]: DEBUG nova.virt.hardware [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1616.248033] env[63024]: DEBUG nova.virt.hardware [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1616.248230] env[63024]: DEBUG nova.virt.hardware [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1616.248403] env[63024]: DEBUG nova.virt.hardware [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1616.248589] env[63024]: DEBUG nova.virt.hardware [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1616.248806] 
env[63024]: DEBUG nova.virt.hardware [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1616.248965] env[63024]: DEBUG nova.virt.hardware [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1616.249153] env[63024]: DEBUG nova.virt.hardware [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1616.249316] env[63024]: DEBUG nova.virt.hardware [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1616.249558] env[63024]: DEBUG nova.virt.hardware [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1616.250548] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36024a45-d0b7-4e8e-ab09-1ef78c05ca39 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.260323] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a68cad9-a269-41ca-a266-138d97bd9397 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.286144] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950497, 'name': CreateVM_Task, 'duration_secs': 0.507643} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.290267] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1616.290980] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.291177] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.291462] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1616.292113] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d9308e1-a0b1-4386-87e2-02b4cf998ded {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.296560] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950498, 'name': PowerOffVM_Task, 'duration_secs': 0.255938} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.297147] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1616.297275] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance 'f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df' progress to 17 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1616.302912] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1616.302912] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52356b2d-9c11-bd80-ac18-8a1c3874ba5a" [ 1616.302912] env[63024]: _type = "Task" [ 1616.302912] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.311478] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52356b2d-9c11-bd80-ac18-8a1c3874ba5a, 'name': SearchDatastore_Task, 'duration_secs': 0.008649} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.311765] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1616.312053] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1616.312397] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1616.312483] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1616.312728] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1616.312996] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb6932ee-83a5-43a7-8608-0e954a079b69 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.330476] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1616.330705] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1616.332025] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87b2f035-0fb8-45ea-9bdd-6d1a95945462 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.337192] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1616.337192] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d77dec-88f7-fd2e-c411-1e4354181479" [ 1616.337192] env[63024]: _type = "Task" [ 1616.337192] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.344827] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d77dec-88f7-fd2e-c411-1e4354181479, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.572066] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950499, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109294} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.572337] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1616.573108] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871e535e-9711-4462-bfa8-3493d8f431e7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.604795] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] b765b8b3-a099-4e23-be30-d1178ecffc37/b765b8b3-a099-4e23-be30-d1178ecffc37.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1616.605217] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a958333-c534-4907-8fe9-877b696ddf84 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.626881] env[63024]: DEBUG nova.compute.manager [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1616.635467] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquiring lock "94d9210e-ca8d-4ef1-a640-2d9a11ad87d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1616.635591] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lock "94d9210e-ca8d-4ef1-a640-2d9a11ad87d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.640843] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950500, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.643474] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1616.643474] env[63024]: value = "task-1950501" [ 1616.643474] env[63024]: _type = "Task" [ 1616.643474] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.649793] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950501, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.755429] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.603s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1616.756018] env[63024]: DEBUG nova.compute.manager [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1616.763580] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.327s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1616.763580] env[63024]: INFO nova.compute.claims [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1616.792359] env[63024]: DEBUG nova.compute.manager [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1616.793698] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c18e0df-aff5-4d59-bb4a-73aee2a2c612 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.805291] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1616.806233] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1616.806433] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1616.806630] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1616.806774] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
1616.806925] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1616.807135] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1616.807310] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1616.808317] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1616.808317] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1616.808317] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1616.815582] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5c511a1-a4ab-4252-b817-9e3f6af767cc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.834793] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1616.834793] env[63024]: value = "task-1950502" [ 1616.834793] env[63024]: _type = "Task" [ 1616.834793] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.847447] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950502, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1616.851024] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d77dec-88f7-fd2e-c411-1e4354181479, 'name': SearchDatastore_Task, 'duration_secs': 0.01044} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1616.851790] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6780f562-88e6-4dfa-9f43-f414cb75ccde {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1616.860017] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1616.860017] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52941e95-6d84-c7e8-4805-f639d148e1a5" [ 1616.860017] env[63024]: _type = "Task" [ 1616.860017] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1616.864941] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52941e95-6d84-c7e8-4805-f639d148e1a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.137978] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950500, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.152829] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950501, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.157146] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1617.268188] env[63024]: DEBUG nova.compute.utils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1617.273060] env[63024]: DEBUG nova.compute.manager [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1617.273060] env[63024]: DEBUG nova.network.neutron [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1617.331442] env[63024]: INFO nova.compute.manager [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] instance snapshotting [ 1617.334330] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18716d69-e9a9-4be0-b51e-65ad00f75fa7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.345624] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950502, 'name': ReconfigVM_Task, 'duration_secs': 0.430944} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.358597] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance 'f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df' progress to 33 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1617.366790] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3492fa3a-06e4-457a-9bf8-a3e9ebcf0cdc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.378898] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52941e95-6d84-c7e8-4805-f639d148e1a5, 'name': SearchDatastore_Task, 'duration_secs': 0.020399} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.379417] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1617.380031] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2/9716d592-32d1-4f1d-b42b-1c8a7d81d2f2.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1617.380031] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-de23e8c1-f06e-4044-83dc-ab7276a5956f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.388473] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1617.388473] env[63024]: value = "task-1950503" [ 1617.388473] env[63024]: _type = "Task" [ 1617.388473] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.391508] env[63024]: DEBUG nova.policy [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fc84a6eed984429b26a693ce7b0876e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9521048e807c4ca2a6d2e74a72b829a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1617.399135] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950503, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.432076] env[63024]: DEBUG nova.compute.manager [req-0d91ddbf-0ca9-4ea7-84fe-f9789c22e9eb req-7595bede-600c-44a8-b8c9-cb43db46cd8e service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Received event network-changed-6e0e9732-b318-4b20-ad72-8c2bc07eaf34 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1617.432292] env[63024]: DEBUG nova.compute.manager [req-0d91ddbf-0ca9-4ea7-84fe-f9789c22e9eb req-7595bede-600c-44a8-b8c9-cb43db46cd8e service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Refreshing instance network info cache due to event network-changed-6e0e9732-b318-4b20-ad72-8c2bc07eaf34. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1617.432534] env[63024]: DEBUG oslo_concurrency.lockutils [req-0d91ddbf-0ca9-4ea7-84fe-f9789c22e9eb req-7595bede-600c-44a8-b8c9-cb43db46cd8e service nova] Acquiring lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1617.432725] env[63024]: DEBUG oslo_concurrency.lockutils [req-0d91ddbf-0ca9-4ea7-84fe-f9789c22e9eb req-7595bede-600c-44a8-b8c9-cb43db46cd8e service nova] Acquired lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1617.432922] env[63024]: DEBUG nova.network.neutron [req-0d91ddbf-0ca9-4ea7-84fe-f9789c22e9eb req-7595bede-600c-44a8-b8c9-cb43db46cd8e service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Refreshing network info cache for port 6e0e9732-b318-4b20-ad72-8c2bc07eaf34 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1617.643414] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950500, 'name': Rename_Task, 'duration_secs': 1.187375} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.643414] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1617.644557] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88c6e714-b19d-47b8-9b43-6f4b3a9e6048 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.655623] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950501, 'name': ReconfigVM_Task, 'duration_secs': 1.007053} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1617.657102] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Reconfigured VM instance instance-00000012 to attach disk [datastore1] b765b8b3-a099-4e23-be30-d1178ecffc37/b765b8b3-a099-4e23-be30-d1178ecffc37.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1617.657784] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1617.657784] env[63024]: value = "task-1950504" [ 1617.657784] env[63024]: _type = "Task" [ 1617.657784] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.657999] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3f3fe4d5-ff11-4075-a7bc-f72520458055 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.669102] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950504, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.670893] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1617.670893] env[63024]: value = "task-1950505" [ 1617.670893] env[63024]: _type = "Task" [ 1617.670893] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.681531] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950505, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.775905] env[63024]: DEBUG nova.compute.manager [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1617.819747] env[63024]: DEBUG nova.network.neutron [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Successfully updated port: 4a4fca95-5ff6-49cc-8848-f863d762cb8c {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1617.865747] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1617.865959] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1617.866194] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1617.866301] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1617.866453] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1617.866595] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1617.866802] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1617.866955] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1617.867185] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1617.867509] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1617.867768] env[63024]: DEBUG nova.virt.hardware [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1617.873672] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Reconfiguring VM instance instance-00000005 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1617.877378] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3107f8fd-22f5-4468-a136-627e646faec0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.893403] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1617.893856] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-14c6c28b-b8ff-4993-855c-a47d8cc6c4b1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1617.906332] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950503, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.909649] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1617.909649] env[63024]: value = "task-1950507" [ 1617.909649] env[63024]: _type = "Task" [ 1617.909649] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.910316] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1617.910316] env[63024]: value = "task-1950506" [ 1617.910316] env[63024]: _type = "Task" [ 1617.910316] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1617.927017] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950506, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1617.930557] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950507, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.173582] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950504, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.188687] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950505, 'name': Rename_Task, 'duration_secs': 0.266132} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.188687] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1618.188687] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cac42865-44d3-4197-bbba-00ddfa74d845 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.195152] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1618.195152] env[63024]: value = "task-1950508" [ 1618.195152] env[63024]: _type = "Task" [ 1618.195152] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.206521] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950508, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.323614] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "refresh_cache-bd07735a-6a75-45fb-9cef-e1f2c301a489" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1618.323745] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquired lock "refresh_cache-bd07735a-6a75-45fb-9cef-e1f2c301a489" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1618.323853] env[63024]: DEBUG nova.network.neutron [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1618.329038] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9de45d9-d4cb-48ee-bd5d-f1120fdc7f00 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.337659] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e1eaa97-5c22-48d3-a6c4-0a4abdea8cd3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.373957] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a047f61-e5a3-4e58-af98-1eb8b5df00bb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.384900] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313b6c5f-1887-4733-aced-7a4a15fb7d08 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.400840] env[63024]: DEBUG nova.compute.provider_tree [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1618.402778] env[63024]: DEBUG nova.network.neutron [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Successfully created port: 182496b0-1eb9-4c3a-a2b9-4f3dec86f48c {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1618.413766] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950503, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.563857} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.420195] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2/9716d592-32d1-4f1d-b42b-1c8a7d81d2f2.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1618.420445] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1618.420984] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4adda1dd-a920-4945-bce3-e4fde9b4fccd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.428266] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950506, 'name': ReconfigVM_Task, 'duration_secs': 0.188909} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.432079] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Reconfigured VM instance instance-00000005 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1618.432679] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950507, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.432679] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1618.432679] env[63024]: value = "task-1950509" [ 1618.432679] env[63024]: _type = "Task" [ 1618.432679] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.433379] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5963518b-8a52-4b98-b225-6b92cd4828bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.458813] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Reconfiguring VM instance instance-00000005 to attach disk [datastore1] f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df/f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1618.462465] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54806011-dd3a-4f23-a492-7ff01cc367a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.479265] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950509, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.485277] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1618.485277] env[63024]: value = "task-1950510" [ 1618.485277] env[63024]: _type = "Task" [ 1618.485277] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1618.493643] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950510, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.658173] env[63024]: DEBUG nova.network.neutron [req-0d91ddbf-0ca9-4ea7-84fe-f9789c22e9eb req-7595bede-600c-44a8-b8c9-cb43db46cd8e service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updated VIF entry in instance network info cache for port 6e0e9732-b318-4b20-ad72-8c2bc07eaf34. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1618.658173] env[63024]: DEBUG nova.network.neutron [req-0d91ddbf-0ca9-4ea7-84fe-f9789c22e9eb req-7595bede-600c-44a8-b8c9-cb43db46cd8e service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance_info_cache with network_info: [{"id": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "address": "fa:16:3e:2b:cc:65", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e0e9732-b3", "ovs_interfaceid": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1618.673150] env[63024]: DEBUG oslo_vmware.api [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950504, 'name': PowerOnVM_Task, 'duration_secs': 0.940038} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.673416] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1618.673652] env[63024]: DEBUG nova.compute.manager [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1618.674437] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7036d51d-7ec1-4a4d-9eec-2557e79b0135 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.706477] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950508, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1618.792554] env[63024]: DEBUG nova.compute.manager [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1618.827250] env[63024]: DEBUG nova.virt.hardware [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1618.827524] env[63024]: DEBUG nova.virt.hardware [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1618.827736] env[63024]: DEBUG nova.virt.hardware [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1618.828085] env[63024]: DEBUG nova.virt.hardware [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1618.828347] env[63024]: DEBUG nova.virt.hardware [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1618.828619] env[63024]: DEBUG nova.virt.hardware [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1618.828963] env[63024]: DEBUG nova.virt.hardware [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1618.829311] env[63024]: DEBUG nova.virt.hardware [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1618.829679] env[63024]: DEBUG nova.virt.hardware [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1618.829892] env[63024]: DEBUG nova.virt.hardware [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1618.830155] env[63024]: DEBUG nova.virt.hardware [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1618.834085] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69e83e63-cf79-4e27-9410-91b712f92835 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.845439] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3934616-5f1f-4ca0-9483-3c1d0cadc239 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.909690] env[63024]: DEBUG nova.scheduler.client.report [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1618.917963] env[63024]: DEBUG nova.network.neutron [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1618.925819] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950507, 'name': CreateSnapshot_Task, 'duration_secs': 0.894284} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.926402] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1618.927230] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0722c34-4a7b-44a7-9d26-c58092d5fe59 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.946038] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950509, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.079119} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1618.946307] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1618.947180] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7676dae-3380-4e57-a951-7547dffc88b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1618.971704] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2/9716d592-32d1-4f1d-b42b-1c8a7d81d2f2.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1618.974315] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9116ced6-43c8-4714-83a4-3648ccdf9712 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.000796] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950510, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.002129] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1619.002129] env[63024]: value = "task-1950511" [ 1619.002129] env[63024]: _type = "Task" [ 1619.002129] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.011910] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950511, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.160853] env[63024]: DEBUG oslo_concurrency.lockutils [req-0d91ddbf-0ca9-4ea7-84fe-f9789c22e9eb req-7595bede-600c-44a8-b8c9-cb43db46cd8e service nova] Releasing lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.192335] env[63024]: DEBUG oslo_concurrency.lockutils [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1619.206140] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950508, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.339041] env[63024]: DEBUG nova.network.neutron [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Updating instance_info_cache with network_info: [{"id": "4a4fca95-5ff6-49cc-8848-f863d762cb8c", "address": "fa:16:3e:4a:95:7d", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a4fca95-5f", "ovs_interfaceid": "4a4fca95-5ff6-49cc-8848-f863d762cb8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1619.414814] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1619.415402] env[63024]: 
DEBUG nova.compute.manager [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1619.418272] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.894s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1619.419751] env[63024]: INFO nova.compute.claims [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1619.448339] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1619.448731] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6990ac09-3306-4402-935f-3e6b1b467067 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.462768] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1619.462768] env[63024]: value = "task-1950512" [ 1619.462768] env[63024]: _type = "Task" [ 1619.462768] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.473234] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950512, 'name': CloneVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.500706] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950510, 'name': ReconfigVM_Task, 'duration_secs': 0.73032} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1619.500931] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Reconfigured VM instance instance-00000005 to attach disk [datastore1] f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df/f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1619.501220] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance 'f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df' progress to 50 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1619.514996] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950511, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.706130] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950508, 'name': PowerOnVM_Task} progress is 91%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.844528] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Releasing lock "refresh_cache-bd07735a-6a75-45fb-9cef-e1f2c301a489" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1619.844872] env[63024]: DEBUG nova.compute.manager [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Instance network_info: |[{"id": "4a4fca95-5ff6-49cc-8848-f863d762cb8c", "address": "fa:16:3e:4a:95:7d", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a4fca95-5f", "ovs_interfaceid": "4a4fca95-5ff6-49cc-8848-f863d762cb8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": 
{}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1619.845328] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:95:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4a4fca95-5ff6-49cc-8848-f863d762cb8c', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1619.855985] env[63024]: DEBUG oslo.service.loopingcall [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1619.856287] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1619.856521] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-37966a74-5ddb-43a5-a7ae-82eeccb32e52 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1619.877344] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1619.877344] env[63024]: value = "task-1950513" [ 1619.877344] env[63024]: _type = "Task" [ 1619.877344] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1619.885710] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950513, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1619.928252] env[63024]: DEBUG nova.compute.utils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1619.929803] env[63024]: DEBUG nova.compute.manager [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1619.929974] env[63024]: DEBUG nova.network.neutron [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1619.974576] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950512, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.011685] env[63024]: DEBUG nova.policy [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f3ab8b8a4c964062a5f379b46149de59', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5489a064ee1d44f0bd6c496f4775b9d6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1620.014993] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a995662-b325-47f5-8f51-f7c619296e42 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.024209] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950511, 'name': ReconfigVM_Task, 'duration_secs': 0.704651} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.040281] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2/9716d592-32d1-4f1d-b42b-1c8a7d81d2f2.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1620.041084] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a5b06a4-31d5-4ccb-a89d-a3c03f62f86c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.044088] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1be9477-9391-434a-b9bf-8fe4709e6a83 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.068019] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance 'f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df' progress to 67 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1620.077192] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1620.077192] env[63024]: value = "task-1950514" [ 1620.077192] env[63024]: _type = "Task" [ 1620.077192] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.086681] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950514, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.211035] env[63024]: DEBUG oslo_vmware.api [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950508, 'name': PowerOnVM_Task, 'duration_secs': 1.624613} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.211657] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1620.211949] env[63024]: INFO nova.compute.manager [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Took 12.09 seconds to spawn the instance on the hypervisor. [ 1620.212644] env[63024]: DEBUG nova.compute.manager [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1620.213699] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268a03b9-b5d8-4d2b-83a3-b76cb25563a8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.387109] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950513, 'name': CreateVM_Task, 'duration_secs': 0.340588} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.387244] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1620.387981] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.388226] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.388473] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1620.388726] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b27d8474-b857-45f4-ad68-ca0c884a01d1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.393495] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1620.393495] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e2070c-501c-19cb-c3d3-29628cc93a63" [ 1620.393495] env[63024]: _type = "Task" [ 1620.393495] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.401667] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e2070c-501c-19cb-c3d3-29628cc93a63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.434014] env[63024]: DEBUG nova.compute.manager [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1620.474329] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950512, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.590659] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950514, 'name': Rename_Task, 'duration_secs': 0.150526} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.590965] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1620.593622] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fcfa26d2-8fed-432a-8f4e-05cabfb87cf9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.600631] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1620.600631] env[63024]: value = "task-1950515" [ 1620.600631] env[63024]: _type = "Task" [ 1620.600631] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.613759] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950515, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.736326] env[63024]: INFO nova.compute.manager [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Took 38.82 seconds to build instance. [ 1620.801862] env[63024]: DEBUG nova.network.neutron [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Port 90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1620.908472] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e2070c-501c-19cb-c3d3-29628cc93a63, 'name': SearchDatastore_Task, 'duration_secs': 0.009513} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1620.908816] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1620.909208] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1620.909325] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1620.909550] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1620.909743] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1620.910023] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b620e8e8-998b-4c37-88e5-9bc85e2b2a80 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.918595] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1620.918777] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1620.919577] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23d47972-512d-43af-b0ca-42e4de11e27c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.927515] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1620.927515] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525841a1-17c5-441f-1189-ff2d29e4b7f6" [ 1620.927515] env[63024]: _type = "Task" [ 1620.927515] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1620.936511] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525841a1-17c5-441f-1189-ff2d29e4b7f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1620.957928] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2ea339d-5116-4fad-b366-3de01ce9c374 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.969534] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4183c186-71d8-4f6f-a12f-bf933d7851ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1620.977763] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950512, 'name': CloneVM_Task, 'duration_secs': 1.441376} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.002904] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Created linked-clone VM from snapshot [ 1621.004935] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7977d6ad-17b7-4b4e-b6bd-06023809ffaa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.008042] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee709f5-f84c-43f0-8d7b-1a81430866e6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.019560] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94435f46-282b-4351-92a0-0bacd3e86253 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.024048] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Uploading image c9af5d5d-bc8a-499a-81a2-6861192785d1 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1621.036938] env[63024]: DEBUG nova.compute.provider_tree [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1621.052621] env[63024]: DEBUG oslo_vmware.rw_handles [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1621.052621] env[63024]: value = "vm-402021" [ 1621.052621] env[63024]: _type = "VirtualMachine" [ 1621.052621] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1621.052845] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-62c4bbf2-cff1-49cb-8e2f-550467d98f02 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.060063] env[63024]: DEBUG oslo_vmware.rw_handles [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lease: (returnval){ [ 1621.060063] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f7debd-99ff-0b63-afeb-5328b8bfcd13" [ 1621.060063] env[63024]: _type = "HttpNfcLease" [ 1621.060063] env[63024]: } obtained for exporting VM: (result){ [ 1621.060063] env[63024]: value = "vm-402021" [ 1621.060063] env[63024]: _type = "VirtualMachine" [ 1621.060063] env[63024]: }. 
{{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1621.060296] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the lease: (returnval){ [ 1621.060296] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f7debd-99ff-0b63-afeb-5328b8bfcd13" [ 1621.060296] env[63024]: _type = "HttpNfcLease" [ 1621.060296] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1621.064242] env[63024]: DEBUG nova.network.neutron [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Successfully created port: 776bbe97-34ad-47f3-9045-81bb3c16a126 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1621.070221] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1621.070221] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f7debd-99ff-0b63-afeb-5328b8bfcd13" [ 1621.070221] env[63024]: _type = "HttpNfcLease" [ 1621.070221] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1621.110113] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950515, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.239816] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35b4adae-0ece-41cc-b789-d0e87a7c9c54 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "b765b8b3-a099-4e23-be30-d1178ecffc37" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.011s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.319119] env[63024]: DEBUG nova.network.neutron [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Successfully updated port: 182496b0-1eb9-4c3a-a2b9-4f3dec86f48c {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1621.437489] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525841a1-17c5-441f-1189-ff2d29e4b7f6, 'name': SearchDatastore_Task, 'duration_secs': 0.012505} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.438263] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-19379363-9e6c-4124-97a9-b44f04c82fb0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.443658] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1621.443658] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525c5d84-abdf-29bf-0d35-48facc7ab3ba" [ 1621.443658] env[63024]: _type = "Task" [ 1621.443658] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.448490] env[63024]: DEBUG nova.compute.manager [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1621.455422] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525c5d84-abdf-29bf-0d35-48facc7ab3ba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.473872] env[63024]: DEBUG nova.virt.hardware [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1621.474113] env[63024]: DEBUG nova.virt.hardware [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1621.474268] env[63024]: DEBUG nova.virt.hardware [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1621.474446] env[63024]: DEBUG nova.virt.hardware [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca 
tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1621.474590] env[63024]: DEBUG nova.virt.hardware [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1621.474733] env[63024]: DEBUG nova.virt.hardware [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1621.474935] env[63024]: DEBUG nova.virt.hardware [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1621.475216] env[63024]: DEBUG nova.virt.hardware [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1621.475422] env[63024]: DEBUG nova.virt.hardware [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1621.475590] env[63024]: DEBUG nova.virt.hardware [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1621.475761] env[63024]: DEBUG nova.virt.hardware [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1621.476606] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e64bae-b868-4943-b38f-ccc208622f1a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.484498] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8c89a92-b8c4-46b3-88f1-b31b1a41dbaf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.540586] env[63024]: DEBUG nova.scheduler.client.report [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 
'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1621.568013] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1621.568013] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f7debd-99ff-0b63-afeb-5328b8bfcd13" [ 1621.568013] env[63024]: _type = "HttpNfcLease" [ 1621.568013] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1621.568326] env[63024]: DEBUG oslo_vmware.rw_handles [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1621.568326] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f7debd-99ff-0b63-afeb-5328b8bfcd13" [ 1621.568326] env[63024]: _type = "HttpNfcLease" [ 1621.568326] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1621.569049] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53036014-80aa-4f27-8189-c98aae3fea69 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.578713] env[63024]: DEBUG oslo_vmware.rw_handles [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e7cb0e-65ed-bc5b-e727-59560b8cada2/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1621.579040] env[63024]: DEBUG oslo_vmware.rw_handles [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e7cb0e-65ed-bc5b-e727-59560b8cada2/disk-0.vmdk for reading. {{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1621.670829] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950515, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1621.742931] env[63024]: DEBUG nova.compute.manager [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1621.775214] env[63024]: DEBUG nova.compute.manager [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Received event network-vif-plugged-4a4fca95-5ff6-49cc-8848-f863d762cb8c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1621.775295] env[63024]: DEBUG oslo_concurrency.lockutils [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] Acquiring lock "bd07735a-6a75-45fb-9cef-e1f2c301a489-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.775501] env[63024]: DEBUG oslo_concurrency.lockutils [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] Lock "bd07735a-6a75-45fb-9cef-e1f2c301a489-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.775683] env[63024]: DEBUG oslo_concurrency.lockutils [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] Lock "bd07735a-6a75-45fb-9cef-e1f2c301a489-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.775854] env[63024]: DEBUG nova.compute.manager [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] No waiting events found dispatching network-vif-plugged-4a4fca95-5ff6-49cc-8848-f863d762cb8c {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1621.776075] env[63024]: WARNING nova.compute.manager [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Received unexpected event network-vif-plugged-4a4fca95-5ff6-49cc-8848-f863d762cb8c for instance with vm_state building and task_state spawning. [ 1621.776225] env[63024]: DEBUG nova.compute.manager [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Received event network-changed-4a4fca95-5ff6-49cc-8848-f863d762cb8c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1621.776401] env[63024]: DEBUG nova.compute.manager [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Refreshing instance network info cache due to event network-changed-4a4fca95-5ff6-49cc-8848-f863d762cb8c. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1621.776514] env[63024]: DEBUG oslo_concurrency.lockutils [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] Acquiring lock "refresh_cache-bd07735a-6a75-45fb-9cef-e1f2c301a489" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.776648] env[63024]: DEBUG oslo_concurrency.lockutils [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] Acquired lock "refresh_cache-bd07735a-6a75-45fb-9cef-e1f2c301a489" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.776800] env[63024]: DEBUG nova.network.neutron [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Refreshing network info cache for port 4a4fca95-5ff6-49cc-8848-f863d762cb8c {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1621.827312] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1621.827312] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1621.827312] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1621.827312] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1621.827573] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1621.827573] env[63024]: DEBUG nova.network.neutron [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Building network info cache for instance 
{{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1621.833965] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-391ba20a-df1f-4ec1-b54f-fff72e764833 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.954518] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525c5d84-abdf-29bf-0d35-48facc7ab3ba, 'name': SearchDatastore_Task, 'duration_secs': 0.016905} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1621.954788] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1621.955056] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] bd07735a-6a75-45fb-9cef-e1f2c301a489/bd07735a-6a75-45fb-9cef-e1f2c301a489.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1621.955316] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b8829cbe-4003-468f-ba4f-5a7cf7d55c8b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1621.962378] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1621.962378] env[63024]: value = "task-1950517" [ 1621.962378] env[63024]: _type = "Task" [ 1621.962378] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1621.971149] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950517, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.045881] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.628s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1622.046418] env[63024]: DEBUG nova.compute.manager [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1622.053022] env[63024]: DEBUG oslo_concurrency.lockutils [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.892s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1622.053022] env[63024]: DEBUG nova.objects.instance [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Lazy-loading 'resources' on Instance uuid 51532b8e-4adf-4cc7-b91e-885d7934a7e8 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1622.169672] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950515, 'name': PowerOnVM_Task} progress is 37%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.266603] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1622.396181] env[63024]: DEBUG nova.network.neutron [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1622.473426] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950517, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.558271] env[63024]: DEBUG nova.compute.utils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1622.560706] env[63024]: DEBUG nova.compute.manager [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1622.560706] env[63024]: DEBUG nova.network.neutron [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1622.673764] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950515, 'name': PowerOnVM_Task} progress is 80%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1622.780586] env[63024]: DEBUG nova.policy [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07af521e467747f4904e79e8dfca8dc4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '009a393f0f504041bd9037d629bc8857', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1622.844401] env[63024]: INFO nova.compute.manager [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Rebuilding instance [ 1622.873683] env[63024]: DEBUG nova.network.neutron [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Updating instance_info_cache with network_info: [{"id": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "address": "fa:16:3e:2f:45:02", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": 
"l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap182496b0-1e", "ovs_interfaceid": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.899973] env[63024]: DEBUG nova.compute.manager [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1622.906924] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b46167b-b813-41d2-b3e3-850f49574a27 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1622.911224] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1622.911476] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1622.911703] env[63024]: DEBUG nova.network.neutron [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1622.966431] env[63024]: DEBUG nova.network.neutron [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Updated VIF entry in instance network info cache for port 4a4fca95-5ff6-49cc-8848-f863d762cb8c. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1622.966785] env[63024]: DEBUG nova.network.neutron [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Updating instance_info_cache with network_info: [{"id": "4a4fca95-5ff6-49cc-8848-f863d762cb8c", "address": "fa:16:3e:4a:95:7d", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4a4fca95-5f", "ovs_interfaceid": "4a4fca95-5ff6-49cc-8848-f863d762cb8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1622.978791] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950517, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.063333] env[63024]: DEBUG nova.compute.manager [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1623.139623] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a45c0ca7-80b2-46f0-9bfa-a0543cb86f8e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.149732] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d08bc11-d4bc-4294-81db-5de47660dafd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.193689] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5f6ea8-31de-4293-b6c7-b36fd41cfd8f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.207302] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cbcad01-0118-4aa4-ac14-f2b6dfdb3ae0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.212221] env[63024]: DEBUG oslo_vmware.api [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950515, 'name': PowerOnVM_Task, 'duration_secs': 2.513448} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.212570] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1623.212814] env[63024]: INFO nova.compute.manager [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Took 12.45 seconds to spawn the instance on the hypervisor. 
[ 1623.213034] env[63024]: DEBUG nova.compute.manager [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1623.214526] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c2bbac-a968-4a07-93d5-ff5bc72b511f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.226776] env[63024]: DEBUG nova.compute.provider_tree [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1623.376831] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.377213] env[63024]: DEBUG nova.compute.manager [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Instance network_info: |[{"id": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "address": "fa:16:3e:2f:45:02", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap182496b0-1e", "ovs_interfaceid": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1623.377691] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:45:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '182496b0-1eb9-4c3a-a2b9-4f3dec86f48c', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1623.388494] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Creating folder: Project (9521048e807c4ca2a6d2e74a72b829a3). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1623.388853] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f65bc3f-4a3b-4f83-b236-df701bf6b376 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.400577] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Created folder: Project (9521048e807c4ca2a6d2e74a72b829a3) in parent group-v401959. [ 1623.400970] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Creating folder: Instances. Parent ref: group-v402023. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1623.401574] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c9641774-8cde-4d1d-af35-27e9cfde60dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.410638] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Created folder: Instances in parent group-v402023. [ 1623.410945] env[63024]: DEBUG oslo.service.loopingcall [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1623.411119] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1623.411298] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7277f3d3-897e-4fcd-a131-68f3f8d91eb2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.437496] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1623.437496] env[63024]: value = "task-1950520" [ 1623.437496] env[63024]: _type = "Task" [ 1623.437496] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.446141] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950520, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.473906] env[63024]: DEBUG oslo_concurrency.lockutils [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] Releasing lock "refresh_cache-bd07735a-6a75-45fb-9cef-e1f2c301a489" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1623.474211] env[63024]: DEBUG nova.compute.manager [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Received event network-changed-fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1623.474379] env[63024]: DEBUG nova.compute.manager [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Refreshing instance network info cache due to event network-changed-fe1aa30b-c99e-4641-9d91-c99d20670de0. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1623.474597] env[63024]: DEBUG oslo_concurrency.lockutils [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] Acquiring lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.474738] env[63024]: DEBUG oslo_concurrency.lockutils [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] Acquired lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.474889] env[63024]: DEBUG nova.network.neutron [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Refreshing network info cache for port fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1623.479700] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950517, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.731310] env[63024]: DEBUG nova.scheduler.client.report [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1623.749658] env[63024]: INFO nova.compute.manager [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Took 37.62 seconds to build instance. [ 1623.934019] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1623.934540] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-686410df-39bc-4272-a43e-31c93d5aa26d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.943630] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Waiting for the task: (returnval){ [ 1623.943630] env[63024]: value = "task-1950521" [ 1623.943630] env[63024]: _type = "Task" [ 1623.943630] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.952033] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950520, 'name': CreateVM_Task, 'duration_secs': 0.388628} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1623.952666] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1623.954180] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1623.954816] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1623.955319] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1623.959301] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71df1431-a765-4997-bdcc-3fe890e84e69 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1623.961601] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950521, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.965578] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1623.965578] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520c6a2c-dbeb-b671-879b-6008c00b7434" [ 1623.965578] env[63024]: _type = "Task" [ 1623.965578] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1623.982868] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520c6a2c-dbeb-b671-879b-6008c00b7434, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1623.989162] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950517, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.005636] env[63024]: DEBUG nova.network.neutron [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Successfully updated port: 776bbe97-34ad-47f3-9045-81bb3c16a126 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1624.074303] env[63024]: DEBUG nova.compute.manager [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1624.113021] env[63024]: DEBUG nova.virt.hardware [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1624.113021] env[63024]: DEBUG nova.virt.hardware [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1624.113021] env[63024]: DEBUG nova.virt.hardware [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1624.113268] env[63024]: DEBUG nova.virt.hardware [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1624.113268] env[63024]: DEBUG nova.virt.hardware [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1624.113268] env[63024]: DEBUG nova.virt.hardware [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1624.113268] env[63024]: DEBUG nova.virt.hardware [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1624.113268] env[63024]: DEBUG nova.virt.hardware [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1624.113398] env[63024]: DEBUG nova.virt.hardware [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1624.113398] env[63024]: DEBUG nova.virt.hardware [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1624.113653] env[63024]: DEBUG nova.virt.hardware [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1624.114929] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc35ff7-4587-4107-8833-eb14f8e1e0ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.124335] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1db21994-872e-445e-9702-41d0af4236d2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.238195] env[63024]: DEBUG oslo_concurrency.lockutils [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.189s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.243538] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.728s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1624.243787] env[63024]: DEBUG nova.objects.instance [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lazy-loading 'resources' on Instance uuid b0b4d94c-cd5c-4452-baa6-9aeec46b43ad {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} 
[ 1624.254723] env[63024]: DEBUG oslo_concurrency.lockutils [None req-36a6c30c-e8e5-4fe8-a813-cd492b1e9805 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 48.536s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.254723] env[63024]: DEBUG nova.network.neutron [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Successfully created port: 246e1d4e-5ecf-48af-aca8-d7ee68ab39c7 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1624.277651] env[63024]: INFO nova.scheduler.client.report [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Deleted allocations for instance 51532b8e-4adf-4cc7-b91e-885d7934a7e8 [ 1624.364207] env[63024]: DEBUG nova.network.neutron [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance_info_cache with network_info: [{"id": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "address": "fa:16:3e:64:8e:6a", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90fdf5d2-f2", "ovs_interfaceid": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.460145] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950521, 'name': PowerOffVM_Task, 'duration_secs': 0.223537} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.460627] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1624.460972] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1624.461971] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39a80ba7-13f5-4231-9230-19f3f44ca458 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.472032] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1624.476037] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-469a1002-8593-456e-b0d7-a206fd1ad675 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.481855] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520c6a2c-dbeb-b671-879b-6008c00b7434, 'name': SearchDatastore_Task, 'duration_secs': 0.068062} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.483642] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.484116] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1624.484532] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.484761] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.484968] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1624.488385] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b9c01e90-02a9-4fab-8d20-7e1b6e7b9dc5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.490608] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950517, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.347895} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.491618] env[63024]: DEBUG nova.network.neutron [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updated VIF entry in instance network info cache for port fe1aa30b-c99e-4641-9d91-c99d20670de0. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1624.491971] env[63024]: DEBUG nova.network.neutron [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updating instance_info_cache with network_info: [{"id": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "address": "fa:16:3e:ec:ef:e1", "network": {"id": "dab57617-8c96-4c9c-a117-05fd2262c951", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1124018667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751ed00ef16a4cca832e3c78731c9379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe1aa30b-c9", "ovs_interfaceid": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1624.493834] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] bd07735a-6a75-45fb-9cef-e1f2c301a489/bd07735a-6a75-45fb-9cef-e1f2c301a489.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1624.493948] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1624.494868] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6478bc94-6d09-4b86-aa1e-cb823990cd2f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.501805] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1624.502216] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1624.504302] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20586e01-b159-47c4-91f8-2048f32a06d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.507584] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1624.507584] env[63024]: value = "task-1950523" [ 1624.507584] env[63024]: _type = "Task" [ 1624.507584] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.508945] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "refresh_cache-d49eae54-cccb-4281-aaa0-d6974529eb7b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1624.509252] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired lock "refresh_cache-d49eae54-cccb-4281-aaa0-d6974529eb7b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1624.509461] env[63024]: DEBUG nova.network.neutron [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1624.515904] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1624.516264] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1624.516521] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Deleting the datastore file [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1624.518470] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11ffd847-c7b7-4415-a61b-900d53819a11 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.522176] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ 
[ 1624.522176] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52043745-875b-045f-5091-42438b921d17" [ 1624.522176] env[63024]: _type = "Task" [ 1624.522176] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.531820] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950523, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.532193] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Waiting for the task: (returnval){ [ 1624.532193] env[63024]: value = "task-1950524" [ 1624.532193] env[63024]: _type = "Task" [ 1624.532193] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.540575] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52043745-875b-045f-5091-42438b921d17, 'name': SearchDatastore_Task, 'duration_secs': 0.020747} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1624.541516] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0acb5742-ff39-4416-8e81-912511734cbf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1624.547710] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950524, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.551340] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1624.551340] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524a17c4-a89f-2fdb-022c-97e34f148ff4" [ 1624.551340] env[63024]: _type = "Task" [ 1624.551340] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1624.560375] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524a17c4-a89f-2fdb-022c-97e34f148ff4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1624.758993] env[63024]: DEBUG nova.compute.manager [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1624.790579] env[63024]: DEBUG oslo_concurrency.lockutils [None req-98bfbb29-77df-4e6c-8842-6813b862a6cb tempest-ServerDiagnosticsV248Test-1954764169 tempest-ServerDiagnosticsV248Test-1954764169-project-member] Lock "51532b8e-4adf-4cc7-b91e-885d7934a7e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.449s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1624.870213] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1624.995735] env[63024]: DEBUG oslo_concurrency.lockutils [req-f126201c-ce01-481b-afc4-af55d758b549 req-cc4dfe4e-a9b1-4545-b851-f2bda9488d8d service nova] Releasing lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.024633] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950523, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118245} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.025046] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1625.026179] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-399690bd-2e44-4abb-8801-bc3b6c96de00 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.053089] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] bd07735a-6a75-45fb-9cef-e1f2c301a489/bd07735a-6a75-45fb-9cef-e1f2c301a489.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1625.060351] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-daf53134-2a83-45a3-84d9-514ced44023b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.075557] env[63024]: DEBUG nova.network.neutron [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1625.086932] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950524, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276347} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.091402] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1625.091594] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1625.091782] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1625.094286] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1625.094286] env[63024]: value = "task-1950525" [ 1625.094286] env[63024]: _type = "Task" [ 1625.094286] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.094512] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524a17c4-a89f-2fdb-022c-97e34f148ff4, 'name': SearchDatastore_Task, 'duration_secs': 0.017509} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1625.097763] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.098036] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9cf45c3a-2a74-4f8e-8817-47bbd748a44b/9cf45c3a-2a74-4f8e-8817-47bbd748a44b.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1625.103322] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2dddde4c-78a4-44fd-b47a-db8296252df7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.112158] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950525, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.113818] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1625.113818] env[63024]: value = "task-1950526" [ 1625.113818] env[63024]: _type = "Task" [ 1625.113818] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.124812] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950526, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.263765] env[63024]: DEBUG nova.network.neutron [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Updating instance_info_cache with network_info: [{"id": "776bbe97-34ad-47f3-9045-81bb3c16a126", "address": "fa:16:3e:e6:1d:cf", "network": {"id": "f42f7ff4-b2ef-45fd-8230-5f16271d5808", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-607041553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5489a064ee1d44f0bd6c496f4775b9d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap776bbe97-34", "ovs_interfaceid": "776bbe97-34ad-47f3-9045-81bb3c16a126", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1625.283711] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.328676] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54028c88-1a95-4065-91cc-2c3ae9b8ce0f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.338476] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b540a8-ec3c-4314-9ba5-c5766b96e57a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.370712] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cd0a5f-0f23-4900-bb54-ddb490dc0d02 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.381783] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c61f92-8833-4d9f-adb8-9fda5655f80a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.402822] env[63024]: DEBUG nova.compute.provider_tree [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1625.405571] env[63024]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e064b99a-5aec-4158-8f27-94a89f6ad584 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.427075] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1625.429070] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8268a491-f68d-4818-80de-ece8c55baa0f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.437401] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance 'f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df' progress to 83 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1625.469297] env[63024]: DEBUG nova.compute.manager [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Received event network-vif-plugged-182496b0-1eb9-4c3a-a2b9-4f3dec86f48c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1625.469604] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Acquiring lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1625.469940] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1625.470189] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1625.470374] env[63024]: DEBUG nova.compute.manager [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] No waiting events found dispatching network-vif-plugged-182496b0-1eb9-4c3a-a2b9-4f3dec86f48c {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1625.470538] env[63024]: WARNING nova.compute.manager [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Received unexpected event network-vif-plugged-182496b0-1eb9-4c3a-a2b9-4f3dec86f48c for instance with vm_state building and task_state spawning. 
[ 1625.470715] env[63024]: DEBUG nova.compute.manager [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Received event network-changed-182496b0-1eb9-4c3a-a2b9-4f3dec86f48c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1625.470890] env[63024]: DEBUG nova.compute.manager [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Refreshing instance network info cache due to event network-changed-182496b0-1eb9-4c3a-a2b9-4f3dec86f48c. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1625.471117] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Acquiring lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1625.471261] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Acquired lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1625.471905] env[63024]: DEBUG nova.network.neutron [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Refreshing network info cache for port 182496b0-1eb9-4c3a-a2b9-4f3dec86f48c {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1625.612089] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950525, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.629663] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950526, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.767948] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Releasing lock "refresh_cache-d49eae54-cccb-4281-aaa0-d6974529eb7b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1625.768330] env[63024]: DEBUG nova.compute.manager [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Instance network_info: |[{"id": "776bbe97-34ad-47f3-9045-81bb3c16a126", "address": "fa:16:3e:e6:1d:cf", "network": {"id": "f42f7ff4-b2ef-45fd-8230-5f16271d5808", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-607041553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5489a064ee1d44f0bd6c496f4775b9d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap776bbe97-34", "ovs_interfaceid": "776bbe97-34ad-47f3-9045-81bb3c16a126", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1625.768768] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:1d:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '56834f67-27a8-43dc-bbc6-a74aaa08959b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '776bbe97-34ad-47f3-9045-81bb3c16a126', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1625.776995] env[63024]: DEBUG oslo.service.loopingcall [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1625.778169] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1625.778718] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d7918f9c-dfbd-4206-b5c3-3162716a4482 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.801656] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1625.801656] env[63024]: value = "task-1950527" [ 1625.801656] env[63024]: _type = "Task" [ 1625.801656] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.811224] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950527, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1625.904988] env[63024]: DEBUG nova.scheduler.client.report [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1625.946277] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1625.946277] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-11683bf5-7225-4e3c-b2e3-df599374665e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1625.955146] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1625.955146] env[63024]: value = "task-1950528" [ 1625.955146] env[63024]: _type = "Task" [ 1625.955146] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1625.964178] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950528, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.124316] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950525, 'name': ReconfigVM_Task, 'duration_secs': 0.634578} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.124962] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Reconfigured VM instance instance-00000014 to attach disk [datastore1] bd07735a-6a75-45fb-9cef-e1f2c301a489/bd07735a-6a75-45fb-9cef-e1f2c301a489.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1626.125629] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dfb86ace-3002-43b7-a07c-9258f5d949a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.132499] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950526, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.768985} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.133393] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9cf45c3a-2a74-4f8e-8817-47bbd748a44b/9cf45c3a-2a74-4f8e-8817-47bbd748a44b.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1626.133597] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1626.133921] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25b85c3b-2312-434a-8752-e1fbfe4d579e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.137824] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1626.137824] env[63024]: value = "task-1950529" [ 1626.137824] env[63024]: _type = "Task" [ 1626.137824] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.140635] env[63024]: DEBUG nova.virt.hardware [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1626.140963] env[63024]: DEBUG nova.virt.hardware [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1626.141191] env[63024]: DEBUG nova.virt.hardware [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1626.141418] env[63024]: DEBUG nova.virt.hardware [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1626.141574] env[63024]: DEBUG nova.virt.hardware [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1626.141771] env[63024]: DEBUG nova.virt.hardware [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1626.141994] env[63024]: DEBUG nova.virt.hardware [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1626.148331] env[63024]: DEBUG nova.virt.hardware [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1626.152420] env[63024]: DEBUG nova.virt.hardware [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 
tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1626.152420] env[63024]: DEBUG nova.virt.hardware [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1626.152420] env[63024]: DEBUG nova.virt.hardware [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1626.152507] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2f8abb-ee0a-4142-8fa6-6aaaaed8be70 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.165371] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1626.165371] env[63024]: value = "task-1950530" [ 1626.165371] env[63024]: _type = "Task" [ 1626.165371] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.176748] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950529, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.178819] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ef358e2-1196-48fe-9657-e609cedf64f4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.186544] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950530, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.203609] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Instance VIF info [] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1626.211478] env[63024]: DEBUG oslo.service.loopingcall [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1626.211859] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1626.212196] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3c776ac1-4f3d-4595-ae7a-68b8660ea774 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.238317] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1626.238317] env[63024]: value = "task-1950531" [ 1626.238317] env[63024]: _type = "Task" [ 1626.238317] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.245602] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950531, 'name': CreateVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.316734] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950527, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.324190] env[63024]: DEBUG nova.network.neutron [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Updated VIF entry in instance network info cache for port 182496b0-1eb9-4c3a-a2b9-4f3dec86f48c. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1626.324592] env[63024]: DEBUG nova.network.neutron [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Updating instance_info_cache with network_info: [{"id": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "address": "fa:16:3e:2f:45:02", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap182496b0-1e", "ovs_interfaceid": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1626.410361] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 
2.167s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.412910] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.363s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1626.413161] env[63024]: DEBUG nova.objects.instance [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Lazy-loading 'resources' on Instance uuid de31255d-b82f-4f32-82b2-0a8368fe2510 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1626.436451] env[63024]: INFO nova.scheduler.client.report [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleted allocations for instance b0b4d94c-cd5c-4452-baa6-9aeec46b43ad [ 1626.464013] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950528, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.666735] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950529, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.681715] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950530, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066082} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.681715] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1626.683164] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50dc3ee9-f5cd-4284-80e0-1175ee563dc4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.708759] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] 9cf45c3a-2a74-4f8e-8817-47bbd748a44b/9cf45c3a-2a74-4f8e-8817-47bbd748a44b.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1626.709098] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6196eac-f4bd-40cc-9867-2311752dcc80 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.730149] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1626.730149] env[63024]: value = "task-1950532" [ 1626.730149] env[63024]: _type = "Task" [ 1626.730149] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.742036] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950532, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.748291] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950531, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.753621] env[63024]: DEBUG nova.network.neutron [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Successfully updated port: 246e1d4e-5ecf-48af-aca8-d7ee68ab39c7 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1626.817107] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950527, 'name': CreateVM_Task, 'duration_secs': 0.521608} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1626.817107] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1626.817107] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.817107] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.817107] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1626.817668] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72a9de1a-0be1-472a-a2cf-28bedea9e48b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1626.823868] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1626.823868] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526eb45c-0498-26bf-1906-54da476af9bb" [ 1626.823868] env[63024]: _type = "Task" [ 1626.823868] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1626.828950] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Releasing lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1626.829398] env[63024]: DEBUG nova.compute.manager [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Received event network-changed-fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1626.829746] env[63024]: DEBUG nova.compute.manager [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Refreshing instance network info cache due to event network-changed-fe1aa30b-c99e-4641-9d91-c99d20670de0. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1626.830126] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Acquiring lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1626.830561] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Acquired lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1626.831013] env[63024]: DEBUG nova.network.neutron [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Refreshing network info cache for port fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1626.839585] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526eb45c-0498-26bf-1906-54da476af9bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1626.947909] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6109e178-1053-4869-b98a-483fd65fc66e tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "b0b4d94c-cd5c-4452-baa6-9aeec46b43ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.622s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1626.969017] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950528, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.166187] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950529, 'name': Rename_Task, 'duration_secs': 0.728694} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.166187] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1627.166187] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-65c1fa25-c47c-43bb-9baf-b06bd70c5c8b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.171832] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1627.171832] env[63024]: value = "task-1950533" [ 1627.171832] env[63024]: _type = "Task" [ 1627.171832] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.182107] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950533, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.243384] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950532, 'name': ReconfigVM_Task, 'duration_secs': 0.312271} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.248595] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Reconfigured VM instance instance-00000015 to attach disk [datastore1] 9cf45c3a-2a74-4f8e-8817-47bbd748a44b/9cf45c3a-2a74-4f8e-8817-47bbd748a44b.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1627.248595] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cac9cc98-cc02-4d66-a8df-6d0fa0599005 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.254591] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Acquiring lock "refresh_cache-e3c9e9de-586d-4baa-b4bb-95c41d527a03" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.254947] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Acquired lock "refresh_cache-e3c9e9de-586d-4baa-b4bb-95c41d527a03" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.255136] env[63024]: DEBUG nova.network.neutron [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1627.256406] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950531, 'name': CreateVM_Task, 'duration_secs': 0.790727} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.257742] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1627.258281] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1627.258281] env[63024]: value = "task-1950534" [ 1627.258281] env[63024]: _type = "Task" [ 1627.258281] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.265257] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.279250] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950534, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.342094] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526eb45c-0498-26bf-1906-54da476af9bb, 'name': SearchDatastore_Task, 'duration_secs': 0.015509} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.344843] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.345298] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1627.345556] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.345843] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.346048] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1627.347773] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 
tempest-ServersAdmin275Test-964372884-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.349492] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1627.349492] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6415ca35-58ee-4475-8ff9-29ae229a98e7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.351698] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-672ba2b5-cf44-40f6-97e7-4ae7f2420cb6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.357996] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Waiting for the task: (returnval){ [ 1627.357996] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5223e560-40c9-8b12-3227-1b446b50853b" [ 1627.357996] env[63024]: _type = "Task" [ 1627.357996] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.365976] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1627.366248] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1627.368713] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31994b99-703e-4592-acea-0c406a838f5e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.378228] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5223e560-40c9-8b12-3227-1b446b50853b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.382764] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1627.382764] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52929757-ea3b-412b-9faa-b7e3770459e6" [ 1627.382764] env[63024]: _type = "Task" [ 1627.382764] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.393780] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52929757-ea3b-412b-9faa-b7e3770459e6, 'name': SearchDatastore_Task, 'duration_secs': 0.016328} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.398627] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c70cb025-5ea2-4d36-9321-32d3834a5420 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.403700] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1627.403700] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52701631-1eea-b877-4d87-3bf94c8da514" [ 1627.403700] env[63024]: _type = "Task" [ 1627.403700] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.416792] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.417415] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52701631-1eea-b877-4d87-3bf94c8da514, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.417659] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1627.434576] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f89e90-b2b0-43dc-b9e5-c57534469f42 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.444455] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f2160a-7ca3-46fa-b33e-825a8052940c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.482883] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f861e0f9-dbfb-4ffe-a095-25ab25a7473b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.490776] env[63024]: DEBUG oslo_vmware.api [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950528, 'name': PowerOnVM_Task, 'duration_secs': 1.183292} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.492911] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1627.495016] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1e1415cc-cbeb-4aef-87f8-46ab24ac556b tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance 'f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df' progress to 100 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1627.499724] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc13248d-83b8-4099-935a-a39ed155d7df {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.513742] env[63024]: DEBUG nova.compute.provider_tree [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1627.683263] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950533, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.778555] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950534, 'name': Rename_Task, 'duration_secs': 0.193209} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.779078] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1627.779154] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8a4caa0-928a-4b90-8ef2-e1dfe84f164e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.790197] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1627.790197] env[63024]: value = "task-1950535" [ 1627.790197] env[63024]: _type = "Task" [ 1627.790197] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.802007] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950535, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.803968] env[63024]: DEBUG nova.network.neutron [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1627.872876] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5223e560-40c9-8b12-3227-1b446b50853b, 'name': SearchDatastore_Task, 'duration_secs': 0.018119} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.873209] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.873432] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1627.873682] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1627.892395] env[63024]: DEBUG nova.network.neutron [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updated VIF entry in instance network info cache for port fe1aa30b-c99e-4641-9d91-c99d20670de0. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1627.892738] env[63024]: DEBUG nova.network.neutron [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updating instance_info_cache with network_info: [{"id": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "address": "fa:16:3e:ec:ef:e1", "network": {"id": "dab57617-8c96-4c9c-a117-05fd2262c951", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1124018667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751ed00ef16a4cca832e3c78731c9379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe1aa30b-c9", "ovs_interfaceid": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1627.917240] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52701631-1eea-b877-4d87-3bf94c8da514, 'name': SearchDatastore_Task, 'duration_secs': 0.024491} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1627.919384] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1627.920600] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] d49eae54-cccb-4281-aaa0-d6974529eb7b/d49eae54-cccb-4281-aaa0-d6974529eb7b.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1627.920600] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1627.920600] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1627.920922] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5387bae-b78c-40fa-82a2-1e2584a01309 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.926314] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1627.926314] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7573bec2-b06e-4ae2-8df9-cb667c45c0b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.934409] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1627.934409] env[63024]: value = "task-1950536" [ 1627.934409] env[63024]: _type = "Task" [ 1627.934409] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.938921] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1627.939160] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1627.942019] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b895e7bd-6b08-46cf-aac6-40bc4fc7c876 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1627.947246] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950536, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1627.950805] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Waiting for the task: (returnval){ [ 1627.950805] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5291289d-5968-c34c-4967-423a94e0dd43" [ 1627.950805] env[63024]: _type = "Task" [ 1627.950805] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1627.960199] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5291289d-5968-c34c-4967-423a94e0dd43, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.017459] env[63024]: DEBUG nova.scheduler.client.report [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1628.078460] env[63024]: DEBUG nova.network.neutron [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Updating instance_info_cache with network_info: [{"id": "246e1d4e-5ecf-48af-aca8-d7ee68ab39c7", "address": "fa:16:3e:26:88:6e", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246e1d4e-5e", "ovs_interfaceid": "246e1d4e-5ecf-48af-aca8-d7ee68ab39c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1628.182964] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950533, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.302509] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950535, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.398607] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Releasing lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.398914] env[63024]: DEBUG nova.compute.manager [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Received event network-changed-630d0aef-3424-48b2-90be-fca999b2ed17 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1628.399154] env[63024]: DEBUG nova.compute.manager [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Refreshing instance network info cache due to event network-changed-630d0aef-3424-48b2-90be-fca999b2ed17. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1628.399317] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Acquiring lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.399507] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Acquired lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.399800] env[63024]: DEBUG nova.network.neutron [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Refreshing network info cache for port 630d0aef-3424-48b2-90be-fca999b2ed17 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1628.445024] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950536, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.464180] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5291289d-5968-c34c-4967-423a94e0dd43, 'name': SearchDatastore_Task, 'duration_secs': 0.037554} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.465156] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf989823-f313-4a9c-9689-14a93ea67b3a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.471882] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Waiting for the task: (returnval){ [ 1628.471882] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521b96af-af05-40dd-a6d2-0b12a588672b" [ 1628.471882] env[63024]: _type = "Task" [ 1628.471882] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.481615] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521b96af-af05-40dd-a6d2-0b12a588672b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.523413] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.110s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1628.528971] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.331s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1628.530699] env[63024]: INFO nova.compute.claims [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1628.584027] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Releasing lock "refresh_cache-e3c9e9de-586d-4baa-b4bb-95c41d527a03" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.584027] env[63024]: DEBUG nova.compute.manager [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Instance network_info: |[{"id": "246e1d4e-5ecf-48af-aca8-d7ee68ab39c7", "address": "fa:16:3e:26:88:6e", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", 
"type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246e1d4e-5e", "ovs_interfaceid": "246e1d4e-5ecf-48af-aca8-d7ee68ab39c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1628.587171] env[63024]: INFO nova.scheduler.client.report [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Deleted allocations for instance de31255d-b82f-4f32-82b2-0a8368fe2510 [ 1628.597609] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:88:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '246e1d4e-5ecf-48af-aca8-d7ee68ab39c7', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1628.609905] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Creating folder: Project (009a393f0f504041bd9037d629bc8857). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1628.611201] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3eee79f7-e91b-410b-8ef5-d66333eada12 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.623690] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Created folder: Project (009a393f0f504041bd9037d629bc8857) in parent group-v401959. [ 1628.624033] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Creating folder: Instances. Parent ref: group-v402028. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1628.624325] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff4bc475-c30a-48ca-b9ce-d1d24acbd1f6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.634357] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Created folder: Instances in parent group-v402028. [ 1628.634661] env[63024]: DEBUG oslo.service.loopingcall [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1628.635214] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1628.635214] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-162b6a7c-a3fe-4c77-8e69-eb79eb26fd29 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.659550] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1628.659550] env[63024]: value = "task-1950539" [ 1628.659550] env[63024]: _type = "Task" [ 1628.659550] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1628.669768] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950539, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.683609] env[63024]: DEBUG oslo_vmware.api [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950533, 'name': PowerOnVM_Task, 'duration_secs': 1.124124} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.683987] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1628.684121] env[63024]: INFO nova.compute.manager [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Took 12.47 seconds to spawn the instance on the hypervisor. 
[ 1628.684307] env[63024]: DEBUG nova.compute.manager [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1628.685134] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96671a6b-ea95-4c18-b0be-8979e35ddc36 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.692115] env[63024]: DEBUG nova.compute.manager [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Received event network-changed-776bbe97-34ad-47f3-9045-81bb3c16a126 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1628.692324] env[63024]: DEBUG nova.compute.manager [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Refreshing instance network info cache due to event network-changed-776bbe97-34ad-47f3-9045-81bb3c16a126. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1628.692517] env[63024]: DEBUG oslo_concurrency.lockutils [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] Acquiring lock "refresh_cache-d49eae54-cccb-4281-aaa0-d6974529eb7b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1628.692664] env[63024]: DEBUG oslo_concurrency.lockutils [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] Acquired lock "refresh_cache-d49eae54-cccb-4281-aaa0-d6974529eb7b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1628.692854] env[63024]: DEBUG nova.network.neutron [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Refreshing network info cache for port 776bbe97-34ad-47f3-9045-81bb3c16a126 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1628.814149] env[63024]: DEBUG oslo_vmware.api [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950535, 'name': PowerOnVM_Task, 'duration_secs': 0.513199} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.815395] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1628.815395] env[63024]: INFO nova.compute.manager [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Took 10.02 seconds to spawn the instance on the hypervisor. 
[ 1628.815395] env[63024]: DEBUG nova.compute.manager [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1628.817880] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16165333-acd8-41c7-9dc5-d6ad2daebc78 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.952778] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950536, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1628.984493] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521b96af-af05-40dd-a6d2-0b12a588672b, 'name': SearchDatastore_Task, 'duration_secs': 0.073053} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1628.984871] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1628.985210] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37/726d9639-1ab4-46a9-975e-5580c8344a37.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1628.985534] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-54166929-cd7e-4171-ba22-01680a9c5bbd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1628.993475] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Waiting for the task: (returnval){ [ 1628.993475] env[63024]: value = "task-1950540" [ 1628.993475] env[63024]: _type = "Task" [ 1628.993475] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.003401] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950540, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.103367] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2da24e96-7f50-431e-9f5c-a2fe4c29f616 tempest-ServerAddressesNegativeTestJSON-760774595 tempest-ServerAddressesNegativeTestJSON-760774595-project-member] Lock "de31255d-b82f-4f32-82b2-0a8368fe2510" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.016s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.171095] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950539, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.212178] env[63024]: INFO nova.compute.manager [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Took 38.85 seconds to build instance. [ 1629.263156] env[63024]: DEBUG nova.network.neutron [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Updated VIF entry in instance network info cache for port 630d0aef-3424-48b2-90be-fca999b2ed17. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1629.263156] env[63024]: DEBUG nova.network.neutron [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Updating instance_info_cache with network_info: [{"id": "630d0aef-3424-48b2-90be-fca999b2ed17", "address": "fa:16:3e:9f:98:e3", "network": {"id": "dab57617-8c96-4c9c-a117-05fd2262c951", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1124018667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751ed00ef16a4cca832e3c78731c9379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap630d0aef-34", "ovs_interfaceid": "630d0aef-3424-48b2-90be-fca999b2ed17", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.346608] env[63024]: INFO nova.compute.manager [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Took 35.46 seconds to build instance. 
[ 1629.374380] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "e2138192-14e0-43d2-9d19-9820747d7217" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.374657] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "e2138192-14e0-43d2-9d19-9820747d7217" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.374869] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "e2138192-14e0-43d2-9d19-9820747d7217-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.375074] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "e2138192-14e0-43d2-9d19-9820747d7217-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.375471] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "e2138192-14e0-43d2-9d19-9820747d7217-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.377852] env[63024]: INFO nova.compute.manager [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Terminating instance [ 1629.445912] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950536, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.504258] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950540, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.587590] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.587856] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.588059] env[63024]: DEBUG nova.compute.manager [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Going to confirm migration 1 {{(pid=63024) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5113}} [ 1629.680625] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950539, 'name': CreateVM_Task, 'duration_secs': 0.567522} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.683335] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1629.684643] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1629.684643] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1629.684921] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1629.689030] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-765a77f4-a324-4251-8910-356be2387887 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.690727] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Waiting for the task: (returnval){ 
[ 1629.690727] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523c48d6-5846-9cc2-287f-5a28e55b1477" [ 1629.690727] env[63024]: _type = "Task" [ 1629.690727] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.707635] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523c48d6-5846-9cc2-287f-5a28e55b1477, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.715932] env[63024]: DEBUG oslo_concurrency.lockutils [None req-477c5fd4-280a-4715-9b6e-34b8c2b6ab0a tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "bd07735a-6a75-45fb-9cef-e1f2c301a489" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.677s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.766418] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Releasing lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1629.766817] env[63024]: DEBUG nova.compute.manager [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Received event network-vif-plugged-776bbe97-34ad-47f3-9045-81bb3c16a126 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1629.767159] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Acquiring lock "d49eae54-cccb-4281-aaa0-d6974529eb7b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1629.767506] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Lock "d49eae54-cccb-4281-aaa0-d6974529eb7b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1629.767872] env[63024]: DEBUG oslo_concurrency.lockutils [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] Lock "d49eae54-cccb-4281-aaa0-d6974529eb7b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.768071] env[63024]: DEBUG nova.compute.manager [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] No waiting events found dispatching network-vif-plugged-776bbe97-34ad-47f3-9045-81bb3c16a126 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1629.768337] env[63024]: WARNING 
nova.compute.manager [req-cc353ddc-a97f-4ab1-849d-741ea844afd0 req-ec19d67f-6365-434d-804e-447771f9c4b0 service nova] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Received unexpected event network-vif-plugged-776bbe97-34ad-47f3-9045-81bb3c16a126 for instance with vm_state building and task_state spawning. [ 1629.813823] env[63024]: DEBUG nova.network.neutron [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Updated VIF entry in instance network info cache for port 776bbe97-34ad-47f3-9045-81bb3c16a126. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1629.814263] env[63024]: DEBUG nova.network.neutron [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Updating instance_info_cache with network_info: [{"id": "776bbe97-34ad-47f3-9045-81bb3c16a126", "address": "fa:16:3e:e6:1d:cf", "network": {"id": "f42f7ff4-b2ef-45fd-8230-5f16271d5808", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-607041553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5489a064ee1d44f0bd6c496f4775b9d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap776bbe97-34", "ovs_interfaceid": "776bbe97-34ad-47f3-9045-81bb3c16a126", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1629.849809] env[63024]: DEBUG oslo_concurrency.lockutils [None req-15671a00-804e-4059-93b8-d456542b9d62 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.362s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1629.886952] env[63024]: DEBUG nova.compute.manager [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1629.887120] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1629.888532] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be361ab3-c15f-4dab-b0ed-564e9cca45fb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.897057] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1629.900540] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bfcb7196-31ea-409e-81d9-0b38d0d5c75a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.904021] env[63024]: DEBUG oslo_vmware.api [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1629.904021] env[63024]: value = "task-1950541" [ 1629.904021] env[63024]: _type = "Task" [ 1629.904021] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.915675] env[63024]: DEBUG oslo_vmware.api [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950541, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1629.949835] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950536, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.756413} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1629.956024] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] d49eae54-cccb-4281-aaa0-d6974529eb7b/d49eae54-cccb-4281-aaa0-d6974529eb7b.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1629.956024] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1629.956024] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-380489b5-cc49-4215-b03d-e64d1f1b0c35 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1629.962200] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1629.962200] env[63024]: value = "task-1950542" [ 1629.962200] env[63024]: _type = "Task" [ 1629.962200] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1629.971042] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950542, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.007732] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950540, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.088126] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d09e4781-5e1b-4f5a-9056-3ca59c27df55 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.101270] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf575a8-4801-435a-a989-a7a8dc3af5f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.138733] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75926e1d-e80e-457c-9c2b-13c66c789be9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.147052] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ae15b6-8f82-4879-b13d-4927b7879197 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.162138] env[63024]: DEBUG nova.compute.provider_tree [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1630.205966] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.206729] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.206729] env[63024]: DEBUG nova.network.neutron [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1630.208125] env[63024]: DEBUG nova.objects.instance [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lazy-loading 'info_cache' on Instance uuid f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1630.214854] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 
tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523c48d6-5846-9cc2-287f-5a28e55b1477, 'name': SearchDatastore_Task, 'duration_secs': 0.016203} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.215565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.215853] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1630.216151] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.216350] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.216588] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1630.216913] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94988bf0-0830-429b-9849-6488aec58268 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.222038] env[63024]: DEBUG nova.compute.manager [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1630.231299] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1630.231513] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1630.232518] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-503dc896-2192-44f9-95ab-1b32d8843a22 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.238619] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Waiting for the task: (returnval){ [ 1630.238619] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d40d92-3e7f-5015-867f-ab993f23b955" [ 1630.238619] env[63024]: _type = "Task" [ 1630.238619] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.251688] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d40d92-3e7f-5015-867f-ab993f23b955, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.316891] env[63024]: DEBUG oslo_concurrency.lockutils [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] Releasing lock "refresh_cache-d49eae54-cccb-4281-aaa0-d6974529eb7b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1630.317157] env[63024]: DEBUG nova.compute.manager [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Received event network-vif-plugged-246e1d4e-5ecf-48af-aca8-d7ee68ab39c7 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1630.318058] env[63024]: DEBUG oslo_concurrency.lockutils [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] Acquiring lock "e3c9e9de-586d-4baa-b4bb-95c41d527a03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.318058] env[63024]: DEBUG oslo_concurrency.lockutils [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] Lock "e3c9e9de-586d-4baa-b4bb-95c41d527a03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1630.318058] env[63024]: DEBUG oslo_concurrency.lockutils [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] Lock "e3c9e9de-586d-4baa-b4bb-95c41d527a03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1630.318292] env[63024]: DEBUG nova.compute.manager [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] No waiting events found dispatching network-vif-plugged-246e1d4e-5ecf-48af-aca8-d7ee68ab39c7 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1630.318422] env[63024]: WARNING nova.compute.manager [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Received unexpected event network-vif-plugged-246e1d4e-5ecf-48af-aca8-d7ee68ab39c7 for instance with vm_state building and task_state spawning. [ 1630.318607] env[63024]: DEBUG nova.compute.manager [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Received event network-changed-246e1d4e-5ecf-48af-aca8-d7ee68ab39c7 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1630.318762] env[63024]: DEBUG nova.compute.manager [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Refreshing instance network info cache due to event network-changed-246e1d4e-5ecf-48af-aca8-d7ee68ab39c7. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1630.318959] env[63024]: DEBUG oslo_concurrency.lockutils [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] Acquiring lock "refresh_cache-e3c9e9de-586d-4baa-b4bb-95c41d527a03" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1630.319118] env[63024]: DEBUG oslo_concurrency.lockutils [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] Acquired lock "refresh_cache-e3c9e9de-586d-4baa-b4bb-95c41d527a03" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1630.319298] env[63024]: DEBUG nova.network.neutron [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Refreshing network info cache for port 246e1d4e-5ecf-48af-aca8-d7ee68ab39c7 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1630.353656] env[63024]: DEBUG nova.compute.manager [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1630.416682] env[63024]: DEBUG oslo_vmware.api [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950541, 'name': PowerOffVM_Task, 'duration_secs': 0.425652} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.416682] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1630.416901] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1630.417075] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6c7b0a58-9fc9-492a-ac80-5c5320ecc1b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.472876] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950542, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.249553} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.473948] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1630.474845] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122b947f-da13-42f6-b084-3ee767ff27f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.500875] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] d49eae54-cccb-4281-aaa0-d6974529eb7b/d49eae54-cccb-4281-aaa0-d6974529eb7b.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1630.500875] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-179f0503-ae6b-4538-8391-1bb3577685f6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.528133] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950540, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.193743} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.529714] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37/726d9639-1ab4-46a9-975e-5580c8344a37.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1630.530212] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1630.530579] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1630.530579] env[63024]: value = "task-1950544" [ 1630.530579] env[63024]: _type = "Task" [ 1630.530579] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.530779] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c9186e2-843f-41c5-a253-bae41ef7888b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.544613] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950544, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.545999] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Waiting for the task: (returnval){ [ 1630.545999] env[63024]: value = "task-1950545" [ 1630.545999] env[63024]: _type = "Task" [ 1630.545999] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.558871] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950545, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.568129] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1630.568471] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1630.568537] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Deleting the datastore file [datastore1] e2138192-14e0-43d2-9d19-9820747d7217 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1630.568786] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c54bf46d-ea19-4e32-9b3c-5aca9b4424a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.576281] env[63024]: DEBUG oslo_vmware.api [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1630.576281] env[63024]: value = "task-1950546" [ 1630.576281] env[63024]: _type = "Task" [ 1630.576281] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.584898] env[63024]: DEBUG oslo_vmware.api [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950546, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.686772] env[63024]: ERROR nova.scheduler.client.report [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [req-8a2bee37-71fa-4d1c-9bee-36a44b977afe] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-8a2bee37-71fa-4d1c-9bee-36a44b977afe"}]} [ 1630.715112] env[63024]: DEBUG nova.scheduler.client.report [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1630.739036] env[63024]: DEBUG nova.scheduler.client.report [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1630.739036] env[63024]: DEBUG nova.compute.provider_tree [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1630.747674] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 
tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1630.753140] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d40d92-3e7f-5015-867f-ab993f23b955, 'name': SearchDatastore_Task, 'duration_secs': 0.012375} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1630.754042] env[63024]: DEBUG nova.scheduler.client.report [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1630.756808] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73dc064f-37dc-46b2-b206-0bea993ecfe5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1630.764984] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Waiting for the task: (returnval){ [ 1630.764984] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5232c7b1-392d-500a-7e40-f4ca71293274" [ 1630.764984] env[63024]: _type = "Task" [ 1630.764984] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1630.774405] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5232c7b1-392d-500a-7e40-f4ca71293274, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1630.784339] env[63024]: DEBUG nova.scheduler.client.report [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1630.885289] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1631.052493] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950544, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.061269] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950545, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070863} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.061356] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1631.065604] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071e67b4-70d0-42cf-97e7-68e593fbd7b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.088972] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37/726d9639-1ab4-46a9-975e-5580c8344a37.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1631.095460] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92c5cb46-2d99-4cc4-a053-1b87bb4c8f1e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.117766] env[63024]: DEBUG oslo_vmware.api [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950546, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.32732} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.121433] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1631.121633] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1631.121821] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1631.122039] env[63024]: INFO nova.compute.manager [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Took 1.24 seconds to destroy the instance on the hypervisor. [ 1631.122289] env[63024]: DEBUG oslo.service.loopingcall [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1631.122561] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Waiting for the task: (returnval){ [ 1631.122561] env[63024]: value = "task-1950547" [ 1631.122561] env[63024]: _type = "Task" [ 1631.122561] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.122745] env[63024]: DEBUG nova.compute.manager [-] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1631.122837] env[63024]: DEBUG nova.network.neutron [-] [instance: e2138192-14e0-43d2-9d19-9820747d7217] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1631.135683] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950547, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.276841] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5232c7b1-392d-500a-7e40-f4ca71293274, 'name': SearchDatastore_Task, 'duration_secs': 0.024463} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.280815] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1631.281138] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e3c9e9de-586d-4baa-b4bb-95c41d527a03/e3c9e9de-586d-4baa-b4bb-95c41d527a03.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1631.281686] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5143be8d-0300-4725-9e7f-62f059f0c81b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.293579] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Waiting for the task: (returnval){ [ 1631.293579] env[63024]: value = "task-1950548" [ 1631.293579] env[63024]: _type = "Task" [ 1631.293579] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.312360] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950548, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.417149] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f133e7d-8a81-4d20-9162-c165fd89aa61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.425470] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58f4121-d913-4199-854c-a3b8a06feb12 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.464327] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c35e87d-c54f-49f1-aa2e-e485c0ef331f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.472750] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428dbd10-a76a-46ac-b5a7-d568fe3ed717 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.488786] env[63024]: DEBUG nova.compute.provider_tree [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1631.543857] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950544, 'name': ReconfigVM_Task, 'duration_secs': 0.545897} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.544197] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Reconfigured VM instance instance-00000016 to attach disk [datastore1] d49eae54-cccb-4281-aaa0-d6974529eb7b/d49eae54-cccb-4281-aaa0-d6974529eb7b.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1631.545637] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b1ab185-dc66-426a-9de3-db01c9bce844 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.558046] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1631.558046] env[63024]: value = "task-1950549" [ 1631.558046] env[63024]: _type = "Task" [ 1631.558046] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.569515] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950549, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.635302] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950547, 'name': ReconfigVM_Task, 'duration_secs': 0.465213} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1631.635382] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37/726d9639-1ab4-46a9-975e-5580c8344a37.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1631.638866] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ae915c1-dc5c-455c-a0b7-c1f4fbda51b1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1631.648977] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Waiting for the task: (returnval){ [ 1631.648977] env[63024]: value = "task-1950550" [ 1631.648977] env[63024]: _type = "Task" [ 1631.648977] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1631.665418] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950550, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.671673] env[63024]: DEBUG nova.network.neutron [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Updated VIF entry in instance network info cache for port 246e1d4e-5ecf-48af-aca8-d7ee68ab39c7. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1631.672284] env[63024]: DEBUG nova.network.neutron [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Updating instance_info_cache with network_info: [{"id": "246e1d4e-5ecf-48af-aca8-d7ee68ab39c7", "address": "fa:16:3e:26:88:6e", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246e1d4e-5e", "ovs_interfaceid": "246e1d4e-5ecf-48af-aca8-d7ee68ab39c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1631.815451] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950548, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1631.996280] env[63024]: DEBUG nova.network.neutron [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance_info_cache with network_info: [{"id": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "address": "fa:16:3e:64:8e:6a", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90fdf5d2-f2", "ovs_interfaceid": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.035840] env[63024]: DEBUG nova.scheduler.client.report [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 45 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1632.036120] env[63024]: DEBUG nova.compute.provider_tree [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 45 to 46 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1632.036305] env[63024]: DEBUG nova.compute.provider_tree [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1632.067867] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950549, 'name': Rename_Task, 'duration_secs': 0.502911} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.071145] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1632.071453] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b4eae60-988f-4016-a30f-629cc36641f8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.078410] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1632.078410] env[63024]: value = "task-1950551" [ 1632.078410] env[63024]: _type = "Task" [ 1632.078410] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.090700] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950551, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.163548] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950550, 'name': Rename_Task, 'duration_secs': 0.339072} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.163865] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1632.164137] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7f626ac5-f65b-4d7f-87ca-4873bfafb3c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.170075] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Waiting for the task: (returnval){ [ 1632.170075] env[63024]: value = "task-1950552" [ 1632.170075] env[63024]: _type = "Task" [ 1632.170075] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.175009] env[63024]: DEBUG oslo_concurrency.lockutils [req-cde1659e-9b0e-4bdb-b3cd-2c54d3b7b1e8 req-6c6f1137-3655-40b7-bd0b-232c9fb302e6 service nova] Releasing lock "refresh_cache-e3c9e9de-586d-4baa-b4bb-95c41d527a03" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.180172] env[63024]: DEBUG nova.network.neutron [-] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1632.182816] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950552, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.252610] env[63024]: DEBUG nova.compute.manager [req-330edd4c-e4e0-4d8a-9965-3c57805b9e95 req-ec6cb006-6713-4122-adb8-1b3bf4247ee4 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Received event network-changed-182496b0-1eb9-4c3a-a2b9-4f3dec86f48c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1632.252610] env[63024]: DEBUG nova.compute.manager [req-330edd4c-e4e0-4d8a-9965-3c57805b9e95 req-ec6cb006-6713-4122-adb8-1b3bf4247ee4 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Refreshing instance network info cache due to event network-changed-182496b0-1eb9-4c3a-a2b9-4f3dec86f48c. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1632.252610] env[63024]: DEBUG oslo_concurrency.lockutils [req-330edd4c-e4e0-4d8a-9965-3c57805b9e95 req-ec6cb006-6713-4122-adb8-1b3bf4247ee4 service nova] Acquiring lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.252610] env[63024]: DEBUG oslo_concurrency.lockutils [req-330edd4c-e4e0-4d8a-9965-3c57805b9e95 req-ec6cb006-6713-4122-adb8-1b3bf4247ee4 service nova] Acquired lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.252610] env[63024]: DEBUG nova.network.neutron [req-330edd4c-e4e0-4d8a-9965-3c57805b9e95 req-ec6cb006-6713-4122-adb8-1b3bf4247ee4 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Refreshing network info cache for port 182496b0-1eb9-4c3a-a2b9-4f3dec86f48c {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1632.311584] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950548, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.737988} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.311584] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e3c9e9de-586d-4baa-b4bb-95c41d527a03/e3c9e9de-586d-4baa-b4bb-95c41d527a03.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1632.311584] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1632.312903] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e38c2c9c-e1c8-481e-a567-94b76ccaee06 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.319078] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Waiting for the task: (returnval){ [ 1632.319078] env[63024]: value = "task-1950553" [ 1632.319078] env[63024]: _type = "Task" [ 1632.319078] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.329439] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950553, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.485144] env[63024]: DEBUG nova.compute.manager [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Received event network-changed-6e0e9732-b318-4b20-ad72-8c2bc07eaf34 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1632.485866] env[63024]: DEBUG nova.compute.manager [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Refreshing instance network info cache due to event network-changed-6e0e9732-b318-4b20-ad72-8c2bc07eaf34. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1632.486259] env[63024]: DEBUG oslo_concurrency.lockutils [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] Acquiring lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1632.486513] env[63024]: DEBUG oslo_concurrency.lockutils [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] Acquired lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1632.487055] env[63024]: DEBUG nova.network.neutron [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Refreshing network info cache for port 6e0e9732-b318-4b20-ad72-8c2bc07eaf34 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1632.506169] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1632.506169] env[63024]: DEBUG nova.objects.instance [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lazy-loading 'migration_context' on Instance uuid f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1632.543438] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.014s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1632.544838] env[63024]: DEBUG nova.compute.manager [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1632.551394] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.783s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1632.553893] env[63024]: INFO nova.compute.claims [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1632.592438] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950551, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.682715] env[63024]: INFO nova.compute.manager [-] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Took 1.56 seconds to deallocate network for instance. [ 1632.683201] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950552, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.831557] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950553, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.128351} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1632.832488] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1632.833943] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c329765-ae5b-45c5-8439-185f100ec646 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.865912] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Reconfiguring VM instance instance-00000017 to attach disk [datastore1] e3c9e9de-586d-4baa-b4bb-95c41d527a03/e3c9e9de-586d-4baa-b4bb-95c41d527a03.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1632.869883] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c735374b-5eb5-4847-82f7-1ad65affdcae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1632.891854] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Waiting for the task: (returnval){ [ 1632.891854] env[63024]: value = "task-1950554" [ 1632.891854] env[63024]: _type = "Task" [ 1632.891854] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1632.903733] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950554, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1632.909336] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquiring lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1632.909658] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1633.009873] env[63024]: DEBUG nova.objects.base [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1633.010746] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede4f55d-e569-4139-906c-b14ad128c88c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.036038] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92d7d012-f832-406a-aa63-e5ff3c534e8f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.047266] env[63024]: DEBUG oslo_vmware.api [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1633.047266] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52afc4f2-6333-d762-c502-65fb1da89240" [ 1633.047266] env[63024]: _type = "Task" [ 1633.047266] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1633.056365] env[63024]: DEBUG oslo_vmware.api [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52afc4f2-6333-d762-c502-65fb1da89240, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.060053] env[63024]: DEBUG nova.compute.utils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1633.061543] env[63024]: DEBUG nova.compute.manager [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1633.061731] env[63024]: DEBUG nova.network.neutron [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1633.091943] env[63024]: DEBUG oslo_vmware.api [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950551, 'name': PowerOnVM_Task, 'duration_secs': 0.776523} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.094448] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1633.094731] env[63024]: INFO nova.compute.manager [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Took 11.65 seconds to spawn the instance on the hypervisor. [ 1633.094835] env[63024]: DEBUG nova.compute.manager [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1633.095825] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cfd37c-c4b8-4e6a-9ef4-c56972c3f7d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1633.185532] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950552, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.189704] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.227976] env[63024]: DEBUG nova.policy [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9fbf54f1bffb4547906415722a5625ea', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c3f84db03a9047f0bb937929cb979cf2', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1633.262142] env[63024]: DEBUG nova.network.neutron [req-330edd4c-e4e0-4d8a-9965-3c57805b9e95 req-ec6cb006-6713-4122-adb8-1b3bf4247ee4 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Updated VIF entry in instance network info cache for port 182496b0-1eb9-4c3a-a2b9-4f3dec86f48c. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1633.262523] env[63024]: DEBUG nova.network.neutron [req-330edd4c-e4e0-4d8a-9965-3c57805b9e95 req-ec6cb006-6713-4122-adb8-1b3bf4247ee4 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Updating instance_info_cache with network_info: [{"id": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "address": "fa:16:3e:2f:45:02", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap182496b0-1e", "ovs_interfaceid": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.403430] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950554, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.404072] env[63024]: DEBUG nova.network.neutron [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updated VIF entry in instance network info cache for port 6e0e9732-b318-4b20-ad72-8c2bc07eaf34. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1633.404870] env[63024]: DEBUG nova.network.neutron [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance_info_cache with network_info: [{"id": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "address": "fa:16:3e:2b:cc:65", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e0e9732-b3", "ovs_interfaceid": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1633.559889] env[63024]: DEBUG oslo_vmware.api [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52afc4f2-6333-d762-c502-65fb1da89240, 'name': SearchDatastore_Task, 'duration_secs': 0.01566} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1633.560243] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1633.566956] env[63024]: DEBUG nova.compute.manager [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1633.624535] env[63024]: INFO nova.compute.manager [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Took 39.21 seconds to build instance. [ 1633.683376] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950552, 'name': PowerOnVM_Task} progress is 82%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.765711] env[63024]: DEBUG oslo_concurrency.lockutils [req-330edd4c-e4e0-4d8a-9965-3c57805b9e95 req-ec6cb006-6713-4122-adb8-1b3bf4247ee4 service nova] Releasing lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.808331] env[63024]: DEBUG nova.network.neutron [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Successfully created port: f9a7bea2-256c-4de5-9b61-91fa78298f4a {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1633.904450] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950554, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1633.907658] env[63024]: DEBUG oslo_concurrency.lockutils [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] Releasing lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1633.907906] env[63024]: DEBUG nova.compute.manager [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Received event network-changed-630d0aef-3424-48b2-90be-fca999b2ed17 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1633.908148] env[63024]: DEBUG nova.compute.manager [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Refreshing instance network info cache due to event network-changed-630d0aef-3424-48b2-90be-fca999b2ed17. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1633.908421] env[63024]: DEBUG oslo_concurrency.lockutils [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] Acquiring lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1633.908686] env[63024]: DEBUG oslo_concurrency.lockutils [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] Acquired lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1633.908787] env[63024]: DEBUG nova.network.neutron [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Refreshing network info cache for port 630d0aef-3424-48b2-90be-fca999b2ed17 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1634.096179] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59363279-a786-4d4f-b177-0bcf2540d105 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.104097] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e5bd19a-fb41-4ece-9c4d-f92f78450b62 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.146075] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f07c6a94-6699-4376-82bd-8cc4c0cb14ca tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "d49eae54-cccb-4281-aaa0-d6974529eb7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 49.508s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1634.152039] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f78ec3a-0e28-4ceb-993a-744c64751489 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.160249] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451f078e-addf-47b6-b052-21c4665b9fe9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.179880] env[63024]: DEBUG nova.compute.provider_tree [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1634.193873] env[63024]: DEBUG oslo_vmware.api [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Task: {'id': task-1950552, 'name': PowerOnVM_Task, 'duration_secs': 1.546473} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.194129] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1634.194982] env[63024]: DEBUG nova.compute.manager [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1634.195508] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9de02a46-b02a-4d7b-a6fc-a3a03c0c2d46 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.406889] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950554, 'name': ReconfigVM_Task, 'duration_secs': 1.074049} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.407765] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Reconfigured VM instance instance-00000017 to attach disk [datastore1] e3c9e9de-586d-4baa-b4bb-95c41d527a03/e3c9e9de-586d-4baa-b4bb-95c41d527a03.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1634.408462] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab665aea-adff-4f05-9838-41afd16505d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.415621] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Waiting for the task: (returnval){ [ 1634.415621] env[63024]: value = "task-1950555" [ 1634.415621] env[63024]: _type = "Task" [ 1634.415621] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.425085] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950555, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1634.434621] env[63024]: DEBUG nova.network.neutron [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1634.582854] env[63024]: DEBUG nova.network.neutron [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1634.589268] env[63024]: DEBUG nova.compute.manager [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1634.616579] env[63024]: DEBUG nova.virt.hardware [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1634.617078] env[63024]: DEBUG nova.virt.hardware [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1634.617381] env[63024]: DEBUG nova.virt.hardware [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1634.617709] env[63024]: DEBUG nova.virt.hardware [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1634.618134] env[63024]: DEBUG nova.virt.hardware [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1634.618428] env[63024]: DEBUG nova.virt.hardware [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
1634.618921] env[63024]: DEBUG nova.virt.hardware [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1634.619240] env[63024]: DEBUG nova.virt.hardware [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1634.619577] env[63024]: DEBUG nova.virt.hardware [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1634.621023] env[63024]: DEBUG nova.virt.hardware [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1634.621023] env[63024]: DEBUG nova.virt.hardware [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1634.622023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-619462ed-2e24-4c99-bcbf-e86f6993273c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.635683] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa8028a-205e-4d50-aeb4-14ad43ec1205 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.652867] env[63024]: DEBUG nova.compute.manager [None req-63097460-d1d2-46a2-812a-bf56f160dfc1 tempest-ServersListShow296Test-2106682744 tempest-ServersListShow296Test-2106682744-project-member] [instance: b420b8c1-7d95-4f84-8396-8e500c9a787a] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1634.688164] env[63024]: DEBUG nova.scheduler.client.report [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1634.715673] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1634.926305] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950555, 'name': Rename_Task, 'duration_secs': 0.261947} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1634.926574] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1634.926818] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb80ccd3-3af0-4f28-9ff0-eefb7451f2a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1634.933636] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Waiting for the task: (returnval){ [ 1634.933636] env[63024]: value = "task-1950556" [ 1634.933636] env[63024]: _type = "Task" [ 1634.933636] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1634.941508] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950556, 'name': PowerOnVM_Task} progress is 0%. 
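The inventory dict reported for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b above is what the resource tracker hands to Placement. The arithmetic Placement applies to such a record is capacity = (total - reserved) * allocation_ratio, with max_unit capping any single allocation; a minimal sketch in plain Python (not Nova code) using the logged values:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,    'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 169,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        # (total - reserved) * allocation_ratio is what Placement may hand out in
        # total; max_unit caps what one consumer can take in a single allocation.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: schedulable={capacity:.0f}, per-allocation cap={inv['max_unit']}")

With these numbers the host oversubscribes CPU 4x (192 schedulable VCPUs from 48 physical), while memory and disk are handed out 1:1.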
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.094440] env[63024]: DEBUG oslo_concurrency.lockutils [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] Releasing lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1635.094440] env[63024]: DEBUG nova.compute.manager [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Received event network-changed-fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1635.094440] env[63024]: DEBUG nova.compute.manager [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Refreshing instance network info cache due to event network-changed-fe1aa30b-c99e-4641-9d91-c99d20670de0. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1635.095087] env[63024]: DEBUG oslo_concurrency.lockutils [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] Acquiring lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1635.095252] env[63024]: DEBUG oslo_concurrency.lockutils [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] Acquired lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1635.095423] env[63024]: DEBUG nova.network.neutron [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Refreshing network info cache for port fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1635.139650] env[63024]: DEBUG oslo_vmware.rw_handles [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e7cb0e-65ed-bc5b-e727-59560b8cada2/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1635.140570] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241a94da-c0da-4dc8-bce5-dae088c80e50 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.148124] env[63024]: DEBUG oslo_vmware.rw_handles [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e7cb0e-65ed-bc5b-e727-59560b8cada2/disk-0.vmdk is in state: ready. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1635.148340] env[63024]: ERROR oslo_vmware.rw_handles [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e7cb0e-65ed-bc5b-e727-59560b8cada2/disk-0.vmdk due to incomplete transfer. [ 1635.148575] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-afd405c9-a5e1-4ac6-9c21-2facbca23d7e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.156250] env[63024]: DEBUG oslo_vmware.rw_handles [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e7cb0e-65ed-bc5b-e727-59560b8cada2/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1635.156468] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Uploaded image c9af5d5d-bc8a-499a-81a2-6861192785d1 to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1635.158551] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1635.159029] env[63024]: DEBUG nova.compute.manager [None req-63097460-d1d2-46a2-812a-bf56f160dfc1 tempest-ServersListShow296Test-2106682744 tempest-ServersListShow296Test-2106682744-project-member] [instance: b420b8c1-7d95-4f84-8396-8e500c9a787a] Instance disappeared before build. {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 1635.161462] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f311a238-f522-41f1-9fea-4024579fa31a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.173744] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1635.173744] env[63024]: value = "task-1950557" [ 1635.173744] env[63024]: _type = "Task" [ 1635.173744] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1635.184427] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950557, 'name': Destroy_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.194656] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.195565] env[63024]: DEBUG nova.compute.manager [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1635.198435] env[63024]: DEBUG oslo_concurrency.lockutils [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.095s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1635.199013] env[63024]: DEBUG nova.objects.instance [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lazy-loading 'resources' on Instance uuid 6e477ec2-9270-42b1-85bd-a315460d9cab {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1635.305031] env[63024]: DEBUG nova.compute.manager [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1635.305031] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82c7b89-295c-4516-a31c-a397a186fb0a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.399633] env[63024]: DEBUG nova.compute.manager [req-3a116489-31c9-416b-9079-88222b1ce714 req-ea78512f-efd8-468d-8448-9b8a6b225f7e service nova] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Received event network-vif-deleted-630d0aef-3424-48b2-90be-fca999b2ed17 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1635.446100] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950556, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.518422] env[63024]: DEBUG nova.compute.manager [req-4135afc9-cb4f-4556-9369-240d8a34a06f req-4569a20e-f251-48f8-bc38-89c83db013f9 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Received event network-changed-fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1635.518735] env[63024]: DEBUG nova.compute.manager [req-4135afc9-cb4f-4556-9369-240d8a34a06f req-4569a20e-f251-48f8-bc38-89c83db013f9 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Refreshing instance network info cache due to event network-changed-fe1aa30b-c99e-4641-9d91-c99d20670de0. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1635.519130] env[63024]: DEBUG oslo_concurrency.lockutils [req-4135afc9-cb4f-4556-9369-240d8a34a06f req-4569a20e-f251-48f8-bc38-89c83db013f9 service nova] Acquiring lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1635.679876] env[63024]: DEBUG oslo_concurrency.lockutils [None req-63097460-d1d2-46a2-812a-bf56f160dfc1 tempest-ServersListShow296Test-2106682744 tempest-ServersListShow296Test-2106682744-project-member] Lock "b420b8c1-7d95-4f84-8396-8e500c9a787a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.006s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1635.688677] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950557, 'name': Destroy_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1635.705832] env[63024]: DEBUG nova.compute.utils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1635.709661] env[63024]: DEBUG nova.compute.manager [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Allocating IP information in the background. 
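The "Start building networks asynchronously for instance" / "Allocating IP information in the background" pair above shows the build path handing port creation to a background greenthread while it keeps preparing block device mappings, then joining the network result when it is actually needed. Nova does this with eventlet; purely to illustrate the shape of that pattern (this is not Nova's code, and the helpers below are stand-ins), the same flow with the standard library:

    import concurrent.futures
    import time

    def allocate_for_instance(instance_uuid):
        # Stand-in for the Neutron port create/bind work done in the background.
        time.sleep(1.0)
        return [{"port_id": "c1430120-4c82-424a-8155-f1e22eb4a7ae"}]

    def build_block_device_mappings(instance_uuid):
        # Stand-in for the BDM preparation that proceeds while networking is pending.
        return ["/dev/sda"]

    uuid = "b7f26f0e-d5a9-42a6-8af2-065659f89cf5"
    with concurrent.futures.ThreadPoolExecutor() as pool:
        nw_future = pool.submit(allocate_for_instance, uuid)
        bdms = build_block_device_mappings(uuid)
        network_info = nw_future.result()  # blocks only once the ports are actually needed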
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1635.709955] env[63024]: DEBUG nova.network.neutron [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1635.772187] env[63024]: DEBUG nova.network.neutron [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Successfully updated port: f9a7bea2-256c-4de5-9b61-91fa78298f4a {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1635.805037] env[63024]: DEBUG nova.policy [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd857e115d7f54be58e8e8dbb20a900d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5d51c3beec44aecb65ba72dffb42d40', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1635.825828] env[63024]: INFO nova.compute.manager [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] instance snapshotting [ 1635.830634] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9be682c-9ff7-46ee-b946-ac73d553dd5b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.858689] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc94fa35-60de-4ece-b037-38b5776f2ed2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1635.944886] env[63024]: DEBUG oslo_vmware.api [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950556, 'name': PowerOnVM_Task, 'duration_secs': 0.693247} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1635.945627] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1635.945915] env[63024]: INFO nova.compute.manager [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Took 11.87 seconds to spawn the instance on the hypervisor. 
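Each "Invoking VirtualMachine.PowerOnVM_Task ... / Task: {...} progress is N% / completed successfully" sequence in this log is the oslo.vmware call-and-poll cycle: the SOAP invocation returns a vSphere task reference immediately and the API session polls it until it reaches a terminal state. A minimal sketch of that cycle against oslo.vmware's public API (the vCenter host, credentials and the 'vm-12345' managed object ID are placeholders, not values from this deployment):

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials; the retry count and poll interval
    # mirror the knobs the Nova VMware driver exposes in nova.conf.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Build a managed object reference for the VM (placeholder moref value).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Kick off the asynchronous vSphere task, then block until it finishes;
    # wait_for_task() is what produces the "progress is N%" polling lines.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)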
[ 1635.946176] env[63024]: DEBUG nova.compute.manager [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1635.947116] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d10511d-d909-4443-875f-1664dba58cb4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.001947] env[63024]: DEBUG nova.network.neutron [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updated VIF entry in instance network info cache for port fe1aa30b-c99e-4641-9d91-c99d20670de0. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1636.002157] env[63024]: DEBUG nova.network.neutron [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updating instance_info_cache with network_info: [{"id": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "address": "fa:16:3e:ec:ef:e1", "network": {"id": "dab57617-8c96-4c9c-a117-05fd2262c951", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1124018667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751ed00ef16a4cca832e3c78731c9379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe1aa30b-c9", "ovs_interfaceid": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.113236] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "e2138192-14e0-43d2-9d19-9820747d7217" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.119058] env[63024]: INFO nova.compute.manager [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Rescuing [ 1636.119058] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "refresh_cache-d49eae54-cccb-4281-aaa0-d6974529eb7b" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1636.119058] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired lock "refresh_cache-d49eae54-cccb-4281-aaa0-d6974529eb7b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1636.119058] env[63024]: DEBUG nova.network.neutron [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1636.187579] env[63024]: DEBUG nova.compute.manager [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1636.193425] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950557, 'name': Destroy_Task, 'duration_secs': 0.789778} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1636.193425] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Destroyed the VM [ 1636.193425] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1636.193425] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fb55b074-555b-4210-8aec-4e8891dc9569 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.205979] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1636.205979] env[63024]: value = "task-1950558" [ 1636.205979] env[63024]: _type = "Task" [ 1636.205979] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.217506] env[63024]: DEBUG nova.compute.manager [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1636.220847] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950558, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.274218] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "refresh_cache-85d6db13-d317-498e-a36a-972e9b36e82b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1636.274627] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquired lock "refresh_cache-85d6db13-d317-498e-a36a-972e9b36e82b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1636.274627] env[63024]: DEBUG nova.network.neutron [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1636.291153] env[63024]: DEBUG nova.network.neutron [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Successfully created port: c1430120-4c82-424a-8155-f1e22eb4a7ae {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1636.314495] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f94ad2-2b9f-4e1d-8f75-94c6a85b27b3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.323215] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192308e6-6e0c-4904-b2a1-7b79a7993bdb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.358513] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8cf79d-9afe-43d4-95bc-36d0b85bdf61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.368365] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-058d3a35-7027-4925-936d-c26d670ba53c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.375566] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1636.375890] env[63024]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2d5291f1-2301-47fa-9c61-4459df6ac6ed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1636.391558] env[63024]: DEBUG nova.compute.provider_tree [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1636.394864] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1636.394864] env[63024]: value = "task-1950559" [ 1636.394864] env[63024]: _type = "Task" [ 1636.394864] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1636.406165] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950559, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.473333] env[63024]: INFO nova.compute.manager [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Took 38.97 seconds to build instance. [ 1636.507029] env[63024]: DEBUG oslo_concurrency.lockutils [req-52ddc829-efec-4cb4-838d-9d52d845a37e req-bdca2ece-3d29-454a-80d3-a57aa5827c08 service nova] Releasing lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1636.507029] env[63024]: DEBUG oslo_concurrency.lockutils [req-4135afc9-cb4f-4556-9369-240d8a34a06f req-4569a20e-f251-48f8-bc38-89c83db013f9 service nova] Acquired lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1636.507029] env[63024]: DEBUG nova.network.neutron [req-4135afc9-cb4f-4556-9369-240d8a34a06f req-4569a20e-f251-48f8-bc38-89c83db013f9 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Refreshing network info cache for port fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1636.573129] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquiring lock "726d9639-1ab4-46a9-975e-5580c8344a37" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.573432] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lock "726d9639-1ab4-46a9-975e-5580c8344a37" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.573665] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquiring lock "726d9639-1ab4-46a9-975e-5580c8344a37-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.573862] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lock "726d9639-1ab4-46a9-975e-5580c8344a37-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1636.574063] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lock "726d9639-1ab4-46a9-975e-5580c8344a37-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1636.576507] env[63024]: INFO nova.compute.manager [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Terminating instance [ 1636.722371] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950558, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.723479] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1636.835295] env[63024]: DEBUG nova.network.neutron [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Instance cache missing network info. 
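The interleaved "Acquiring lock ... by ... / acquired ... :: waited N / released ... :: held N" lines throughout this section (the per-instance lock, the "<uuid>-events" lock, "compute_resources", the "refresh_cache-<uuid>" locks) come from oslo.concurrency's lockutils, which also logs the waited/held timings. The two usage forms that produce these lines, sketched with names taken from the log (a minimal illustration, not Nova's code):

    from oslo_concurrency import lockutils

    # Context-manager form, as used around the refresh_cache-<uuid> critical sections.
    with lockutils.lock('refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e'):
        pass  # refresh the network info cache while holding the lock

    # Decorator form, as used for resource-tracker work under "compute_resources".
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass  # entry/exit of the wrapper emit the acquired/released debug lines

    instance_claim()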
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1636.868696] env[63024]: DEBUG nova.network.neutron [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Updating instance_info_cache with network_info: [{"id": "776bbe97-34ad-47f3-9045-81bb3c16a126", "address": "fa:16:3e:e6:1d:cf", "network": {"id": "f42f7ff4-b2ef-45fd-8230-5f16271d5808", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-607041553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5489a064ee1d44f0bd6c496f4775b9d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap776bbe97-34", "ovs_interfaceid": "776bbe97-34ad-47f3-9045-81bb3c16a126", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1636.896974] env[63024]: DEBUG nova.scheduler.client.report [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1636.917575] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950559, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1636.975953] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fd488b90-1109-47fb-b332-0f1c171746a0 tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Lock "e3c9e9de-586d-4baa-b4bb-95c41d527a03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.745s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.084175] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquiring lock "refresh_cache-726d9639-1ab4-46a9-975e-5580c8344a37" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1637.084393] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquired lock "refresh_cache-726d9639-1ab4-46a9-975e-5580c8344a37" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1637.084678] env[63024]: DEBUG nova.network.neutron [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1637.090996] env[63024]: DEBUG oslo_concurrency.lockutils [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Acquiring lock "17e1dfa2-b104-4aac-928e-6364da155c3d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.091268] env[63024]: DEBUG oslo_concurrency.lockutils [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Lock "17e1dfa2-b104-4aac-928e-6364da155c3d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.091476] env[63024]: DEBUG oslo_concurrency.lockutils [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Acquiring lock "17e1dfa2-b104-4aac-928e-6364da155c3d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1637.091659] env[63024]: DEBUG oslo_concurrency.lockutils [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Lock "17e1dfa2-b104-4aac-928e-6364da155c3d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.091829] 
env[63024]: DEBUG oslo_concurrency.lockutils [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Lock "17e1dfa2-b104-4aac-928e-6364da155c3d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.096681] env[63024]: INFO nova.compute.manager [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Terminating instance [ 1637.104064] env[63024]: DEBUG nova.network.neutron [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Updating instance_info_cache with network_info: [{"id": "f9a7bea2-256c-4de5-9b61-91fa78298f4a", "address": "fa:16:3e:23:0a:b3", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.183", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9a7bea2-25", "ovs_interfaceid": "f9a7bea2-256c-4de5-9b61-91fa78298f4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1637.217887] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950558, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.233507] env[63024]: DEBUG nova.compute.manager [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Start spawning the instance on the hypervisor. 
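The network_info blobs being written into the instance info cache above are Nova's network model serialized as plain dicts, one per VIF. To make the nesting easier to read, a short sketch that pulls the commonly needed fields out of one of those entries; the dict literal is abbreviated from the port-f9a7bea2 record logged just above:

    # Abbreviated copy of one cached VIF entry from the log above.
    vif = {
        "id": "f9a7bea2-256c-4de5-9b61-91fa78298f4a",
        "address": "fa:16:3e:23:0a:b3",
        "devname": "tapf9a7bea2-25",
        "type": "ovs",
        "network": {
            "id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.233.0/24",
                "gateway": {"address": "192.168.233.1"},
                "ips": [{"address": "192.168.233.183", "type": "fixed"}],
            }],
            "meta": {"mtu": 8950},
        },
    }

    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["devname"], vif["address"], fixed_ips, vif["network"]["meta"]["mtu"])
    # -> tapf9a7bea2-25 fa:16:3e:23:0a:b3 ['192.168.233.183'] 8950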
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1637.265550] env[63024]: DEBUG nova.virt.hardware [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1637.265756] env[63024]: DEBUG nova.virt.hardware [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1637.265853] env[63024]: DEBUG nova.virt.hardware [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1637.266137] env[63024]: DEBUG nova.virt.hardware [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1637.266266] env[63024]: DEBUG nova.virt.hardware [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1637.266411] env[63024]: DEBUG nova.virt.hardware [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1637.266616] env[63024]: DEBUG nova.virt.hardware [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1637.266803] env[63024]: DEBUG nova.virt.hardware [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1637.266985] 
env[63024]: DEBUG nova.virt.hardware [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1637.267219] env[63024]: DEBUG nova.virt.hardware [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1637.267398] env[63024]: DEBUG nova.virt.hardware [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1637.268304] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42556e9c-18ab-42d8-907a-dd819408970a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.277124] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfac1645-01a1-4f62-8e93-94ab9b3e9cdb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.371240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Releasing lock "refresh_cache-d49eae54-cccb-4281-aaa0-d6974529eb7b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1637.405620] env[63024]: DEBUG oslo_concurrency.lockutils [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.207s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.417025] env[63024]: DEBUG oslo_concurrency.lockutils [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.116s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1637.417025] env[63024]: DEBUG nova.objects.instance [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Lazy-loading 'resources' on Instance uuid e03b8577-9298-4e88-98ea-6258e97db28d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1637.425445] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950559, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.441535] env[63024]: DEBUG nova.compute.manager [None req-a7c7ab9a-d268-4021-8bef-db2d9dbfeb12 tempest-ServerExternalEventsTest-1144202468 tempest-ServerExternalEventsTest-1144202468-project] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Received event network-changed {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1637.441838] env[63024]: DEBUG nova.compute.manager [None req-a7c7ab9a-d268-4021-8bef-db2d9dbfeb12 tempest-ServerExternalEventsTest-1144202468 tempest-ServerExternalEventsTest-1144202468-project] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Refreshing instance network info cache due to event network-changed. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1637.442106] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a7c7ab9a-d268-4021-8bef-db2d9dbfeb12 tempest-ServerExternalEventsTest-1144202468 tempest-ServerExternalEventsTest-1144202468-project] Acquiring lock "refresh_cache-e3c9e9de-586d-4baa-b4bb-95c41d527a03" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1637.442285] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a7c7ab9a-d268-4021-8bef-db2d9dbfeb12 tempest-ServerExternalEventsTest-1144202468 tempest-ServerExternalEventsTest-1144202468-project] Acquired lock "refresh_cache-e3c9e9de-586d-4baa-b4bb-95c41d527a03" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1637.442502] env[63024]: DEBUG nova.network.neutron [None req-a7c7ab9a-d268-4021-8bef-db2d9dbfeb12 tempest-ServerExternalEventsTest-1144202468 tempest-ServerExternalEventsTest-1144202468-project] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1637.445316] env[63024]: INFO nova.scheduler.client.report [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Deleted allocations for instance 6e477ec2-9270-42b1-85bd-a315460d9cab [ 1637.457300] env[63024]: DEBUG nova.network.neutron [req-4135afc9-cb4f-4556-9369-240d8a34a06f req-4569a20e-f251-48f8-bc38-89c83db013f9 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updated VIF entry in instance network info cache for port fe1aa30b-c99e-4641-9d91-c99d20670de0. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1637.457300] env[63024]: DEBUG nova.network.neutron [req-4135afc9-cb4f-4556-9369-240d8a34a06f req-4569a20e-f251-48f8-bc38-89c83db013f9 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updating instance_info_cache with network_info: [{"id": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "address": "fa:16:3e:ec:ef:e1", "network": {"id": "dab57617-8c96-4c9c-a117-05fd2262c951", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1124018667-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "751ed00ef16a4cca832e3c78731c9379", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "13d625c9-77ec-4edb-a56b-9f37a314cc39", "external-id": "nsx-vlan-transportzone-358", "segmentation_id": 358, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfe1aa30b-c9", "ovs_interfaceid": "fe1aa30b-c99e-4641-9d91-c99d20670de0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1637.482690] env[63024]: DEBUG nova.compute.manager [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1637.604022] env[63024]: DEBUG nova.compute.manager [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1637.604022] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1637.604022] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07315295-536e-423b-ac44-a970c6f1a828 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.607033] env[63024]: DEBUG nova.network.neutron [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1637.609201] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Releasing lock "refresh_cache-85d6db13-d317-498e-a36a-972e9b36e82b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1637.609602] env[63024]: DEBUG nova.compute.manager [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Instance network_info: |[{"id": "f9a7bea2-256c-4de5-9b61-91fa78298f4a", "address": "fa:16:3e:23:0a:b3", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.183", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9a7bea2-25", "ovs_interfaceid": "f9a7bea2-256c-4de5-9b61-91fa78298f4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1637.610430] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:0a:b3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f9a7bea2-256c-4de5-9b61-91fa78298f4a', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1637.618610] env[63024]: DEBUG oslo.service.loopingcall [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1637.619757] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1637.620125] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27f7356b-616c-4182-b365-88d96be8e623 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.640241] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1637.641061] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-63458e46-3028-445b-bf89-8bf35d446b92 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.648186] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1637.648186] env[63024]: value = "task-1950560" [ 1637.648186] env[63024]: _type = "Task" [ 1637.648186] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.650651] env[63024]: DEBUG oslo_vmware.api [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Waiting for the task: (returnval){ [ 1637.650651] env[63024]: value = "task-1950561" [ 1637.650651] env[63024]: _type = "Task" [ 1637.650651] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.658146] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950560, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.666050] env[63024]: DEBUG oslo_vmware.api [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950561, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1637.714989] env[63024]: DEBUG nova.network.neutron [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1637.724035] env[63024]: DEBUG oslo_vmware.api [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950558, 'name': RemoveSnapshot_Task, 'duration_secs': 1.249352} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.724035] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1637.724035] env[63024]: INFO nova.compute.manager [None req-92210e6b-9b1b-4682-9fd6-bc1cf800494e tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Took 20.39 seconds to snapshot the instance on the hypervisor. [ 1637.928038] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950559, 'name': CreateSnapshot_Task, 'duration_secs': 1.311522} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1637.928407] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1637.928735] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1637.928942] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0ad9c69a-51ae-4014-916e-771802736e91 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.931687] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a429bb8a-657a-4694-a0b0-e5f05c55cd09 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1637.962119] env[63024]: DEBUG oslo_concurrency.lockutils [None req-873d6a6c-5457-40be-80dc-b062a2d3e47a tempest-AttachInterfacesV270Test-1560530248 tempest-AttachInterfacesV270Test-1560530248-project-member] Lock "6e477ec2-9270-42b1-85bd-a315460d9cab" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.576s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1637.962344] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1637.962344] env[63024]: value = "task-1950562" [ 1637.962344] env[63024]: _type = "Task" [ 1637.962344] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1637.966914] env[63024]: DEBUG oslo_concurrency.lockutils [req-4135afc9-cb4f-4556-9369-240d8a34a06f req-4569a20e-f251-48f8-bc38-89c83db013f9 service nova] Releasing lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1637.979545] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950562, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.028521] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.162925] env[63024]: DEBUG oslo_vmware.api [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950561, 'name': PowerOffVM_Task, 'duration_secs': 0.293519} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.168775] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1638.168996] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1638.169642] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950560, 'name': CreateVM_Task, 'duration_secs': 0.483087} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.170142] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb751dfd-f97b-422a-8836-3deabfb63d03 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.171742] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1638.172448] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.172609] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.172943] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1638.173691] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39d890bd-a57c-494a-9098-600eb906f040 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.179418] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1638.179418] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5232fb46-1ac7-ace4-838f-a59b08208698" [ 1638.179418] env[63024]: _type = "Task" [ 1638.179418] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.188511] env[63024]: DEBUG nova.compute.manager [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Received event network-vif-plugged-f9a7bea2-256c-4de5-9b61-91fa78298f4a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1638.188511] env[63024]: DEBUG oslo_concurrency.lockutils [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] Acquiring lock "85d6db13-d317-498e-a36a-972e9b36e82b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.188511] env[63024]: DEBUG oslo_concurrency.lockutils [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] Lock "85d6db13-d317-498e-a36a-972e9b36e82b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.188511] env[63024]: DEBUG oslo_concurrency.lockutils [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] Lock "85d6db13-d317-498e-a36a-972e9b36e82b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.188511] env[63024]: DEBUG nova.compute.manager [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] No waiting events found dispatching network-vif-plugged-f9a7bea2-256c-4de5-9b61-91fa78298f4a {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1638.188878] env[63024]: WARNING nova.compute.manager [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Received unexpected event network-vif-plugged-f9a7bea2-256c-4de5-9b61-91fa78298f4a for instance with vm_state building and task_state spawning. [ 1638.188878] env[63024]: DEBUG nova.compute.manager [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Received event network-changed-f9a7bea2-256c-4de5-9b61-91fa78298f4a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1638.189035] env[63024]: DEBUG nova.compute.manager [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Refreshing instance network info cache due to event network-changed-f9a7bea2-256c-4de5-9b61-91fa78298f4a. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1638.189863] env[63024]: DEBUG oslo_concurrency.lockutils [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] Acquiring lock "refresh_cache-85d6db13-d317-498e-a36a-972e9b36e82b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.189863] env[63024]: DEBUG oslo_concurrency.lockutils [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] Acquired lock "refresh_cache-85d6db13-d317-498e-a36a-972e9b36e82b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.189863] env[63024]: DEBUG nova.network.neutron [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Refreshing network info cache for port f9a7bea2-256c-4de5-9b61-91fa78298f4a {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1638.202763] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5232fb46-1ac7-ace4-838f-a59b08208698, 'name': SearchDatastore_Task, 'duration_secs': 0.013947} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.204060] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.206063] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1638.206063] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.206063] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1638.206063] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Creating directory with 
path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1638.206300] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-75497b65-9753-41fd-b8d7-454baa077664 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.217688] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Releasing lock "refresh_cache-726d9639-1ab4-46a9-975e-5580c8344a37" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1638.218224] env[63024]: DEBUG nova.compute.manager [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1638.218424] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1638.219316] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401fb1ef-8194-487c-96ff-d42368a5f585 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.224341] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1638.224499] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1638.228268] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18e70f01-8cb2-4b7d-990c-662872605133 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.237290] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1638.238899] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3ec91c2e-4ccc-42e3-8399-1cbc911a21bd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.242357] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1638.242357] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529a79c8-67d8-769d-8c68-e05aeec7e15b" [ 1638.242357] env[63024]: _type = "Task" [ 1638.242357] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.247383] env[63024]: DEBUG oslo_vmware.api [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1638.247383] env[63024]: value = "task-1950564" [ 1638.247383] env[63024]: _type = "Task" [ 1638.247383] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.254142] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529a79c8-67d8-769d-8c68-e05aeec7e15b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.259297] env[63024]: DEBUG oslo_vmware.api [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950564, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.348173] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1638.348426] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1638.348571] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Deleting the datastore file [datastore1] 17e1dfa2-b104-4aac-928e-6364da155c3d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1638.349013] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0023815-baca-4b1f-9fd2-d3e54fc578ff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.357286] env[63024]: DEBUG oslo_vmware.api [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Waiting for the task: (returnval){ [ 1638.357286] env[63024]: value = "task-1950565" [ 1638.357286] env[63024]: _type = "Task" [ 1638.357286] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.364560] env[63024]: DEBUG oslo_vmware.api [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950565, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.473316] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1638.478162] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e4c69a37-3ee5-4a64-b394-5c99d76c5857 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.493399] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950562, 'name': PowerOffVM_Task, 'duration_secs': 0.21564} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.496977] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1638.497410] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1638.497410] env[63024]: value = "task-1950566" [ 1638.497410] env[63024]: _type = "Task" [ 1638.497410] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.505948] env[63024]: DEBUG nova.network.neutron [None req-a7c7ab9a-d268-4021-8bef-db2d9dbfeb12 tempest-ServerExternalEventsTest-1144202468 tempest-ServerExternalEventsTest-1144202468-project] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Updating instance_info_cache with network_info: [{"id": "246e1d4e-5ecf-48af-aca8-d7ee68ab39c7", "address": "fa:16:3e:26:88:6e", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.74", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap246e1d4e-5e", "ovs_interfaceid": "246e1d4e-5ecf-48af-aca8-d7ee68ab39c7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1638.512177] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-068b8b25-6d8e-482a-a482-863be2522376 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.512177] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Acquiring lock "cc5cfa6d-d3db-4997-8413-2460e1124f02" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.512177] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Lock "cc5cfa6d-d3db-4997-8413-2460e1124f02" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 
0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.552400] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950566, 'name': CloneVM_Task} progress is 12%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.556232] env[63024]: DEBUG nova.network.neutron [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Successfully updated port: c1430120-4c82-424a-8155-f1e22eb4a7ae {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1638.558024] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53035b9-1fb3-4b8b-a35e-c2f8e743d0e6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.579688] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-839bea53-b665-4bd4-8d51-388fe1b27930 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.592175] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f1840d-3210-49d8-b122-2a14346ef6cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.599714] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Acquiring lock "e3c9e9de-586d-4baa-b4bb-95c41d527a03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.599958] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Lock "e3c9e9de-586d-4baa-b4bb-95c41d527a03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.600215] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Acquiring lock "e3c9e9de-586d-4baa-b4bb-95c41d527a03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1638.600390] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Lock "e3c9e9de-586d-4baa-b4bb-95c41d527a03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1638.600558] 
env[63024]: DEBUG oslo_concurrency.lockutils [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Lock "e3c9e9de-586d-4baa-b4bb-95c41d527a03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1638.632623] env[63024]: INFO nova.compute.manager [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Terminating instance [ 1638.634969] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79e03feb-c246-4e4f-b24f-71d7534b8eb7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.641992] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1638.642253] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a3b766b2-8f82-4692-9908-5fab60ac3c2f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.648261] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cf97d3-df72-4f8c-9b55-714f347f031a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.654187] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1638.654187] env[63024]: value = "task-1950567" [ 1638.654187] env[63024]: _type = "Task" [ 1638.654187] env[63024]: } to complete. 
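Note on the lock bookkeeping lines above ('Acquiring lock "..." by "..."', 'acquired ... waited 0.000s', '"released" ... held 0.000s'): per the lockutils.py:402/407/421 locators they are emitted by oslo_concurrency.lockutils around named locks. The sketch below reproduces only the waited/held accounting those messages report, using the standard library; the helper names here are not oslo's API.

# Illustrative reimplementation of the waited/held accounting that
# oslo_concurrency.lockutils logs (lockutils.py:402/407/421 above).
# Standard library only; named_lock is a hypothetical helper.
import contextlib
import threading
import time

_locks = {}
_registry_guard = threading.Lock()


def _get_lock(name):
    with _registry_guard:
        return _locks.setdefault(name, threading.Lock())


@contextlib.contextmanager
def named_lock(name, owner):
    lock = _get_lock(name)
    t0 = time.monotonic()
    print(f'Acquiring lock "{name}" by "{owner}"')
    lock.acquire()
    waited = time.monotonic() - t0
    print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
    t1 = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - t1
        print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')


if __name__ == "__main__":
    with named_lock("refresh_cache-<instance-uuid>", "example-caller"):
        time.sleep(0.1)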
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.667510] env[63024]: DEBUG nova.compute.provider_tree [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1638.674758] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1638.674971] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1638.675237] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1638.760232] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529a79c8-67d8-769d-8c68-e05aeec7e15b, 'name': SearchDatastore_Task, 'duration_secs': 0.009361} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.765043] env[63024]: DEBUG oslo_vmware.api [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950564, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.767217] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c55793d9-d722-4d60-91c7-15859caf66b3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1638.773192] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1638.773192] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f1ac72-fdea-7d0a-daa3-fbdc5dcbfd62" [ 1638.773192] env[63024]: _type = "Task" [ 1638.773192] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1638.781583] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f1ac72-fdea-7d0a-daa3-fbdc5dcbfd62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1638.868312] env[63024]: DEBUG oslo_vmware.api [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Task: {'id': task-1950565, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206393} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1638.868581] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1638.868771] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1638.868974] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1638.869378] env[63024]: INFO nova.compute.manager [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1638.869480] env[63024]: DEBUG oslo.service.loopingcall [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1638.869864] env[63024]: DEBUG nova.compute.manager [-] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1638.869864] env[63024]: DEBUG nova.network.neutron [-] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1639.016932] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a7c7ab9a-d268-4021-8bef-db2d9dbfeb12 tempest-ServerExternalEventsTest-1144202468 tempest-ServerExternalEventsTest-1144202468-project] Releasing lock "refresh_cache-e3c9e9de-586d-4baa-b4bb-95c41d527a03" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1639.028269] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950566, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.062458] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "refresh_cache-b7f26f0e-d5a9-42a6-8af2-065659f89cf5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1639.062609] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquired lock "refresh_cache-b7f26f0e-d5a9-42a6-8af2-065659f89cf5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.062766] env[63024]: DEBUG nova.network.neutron [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1639.147391] env[63024]: DEBUG nova.compute.manager [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1639.147729] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1639.148763] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1f8b961-4b15-4e0a-b517-b903d0aa2cea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.162985] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1639.163872] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0b4819b-382e-404b-b040-a02086241422 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.175208] env[63024]: DEBUG oslo_vmware.api [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Waiting for the task: (returnval){ [ 1639.175208] env[63024]: value = "task-1950569" [ 1639.175208] env[63024]: _type = "Task" [ 1639.175208] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.197358] env[63024]: DEBUG oslo_vmware.api [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950569, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.199802] env[63024]: ERROR nova.scheduler.client.report [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] [req-944aaa7c-2c59-4df0-b2c5-4d56b1bf656c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-944aaa7c-2c59-4df0-b2c5-4d56b1bf656c"}]} [ 1639.228632] env[63024]: DEBUG nova.scheduler.client.report [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1639.260325] env[63024]: DEBUG oslo_vmware.api [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950564, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.262540] env[63024]: DEBUG nova.scheduler.client.report [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1639.262760] env[63024]: DEBUG nova.compute.provider_tree [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1639.275327] env[63024]: DEBUG nova.network.neutron [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Updated VIF entry in instance network info cache for port f9a7bea2-256c-4de5-9b61-91fa78298f4a. 
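Note on the ERROR at [ 1639.199802] and the "Refreshing inventories ... Updating ProviderTree inventory" lines that follow: the report client's inventory PUT was rejected with 409 placement.concurrent_update because the resource provider generation advanced underneath it, so it re-reads the provider state and retries. Below is a hedged sketch of that optimistic-concurrency retry against the placement inventories endpoint; PLACEMENT_URL, TOKEN, and the retry count are placeholders, not values taken from the log or from Nova's report client.

# Hedged sketch of a generation-conditional inventory update with retry on
# 409 placement.concurrent_update. Endpoint, token, and microversion are
# assumptions for illustration only.
import requests

PLACEMENT_URL = "http://placement.example/resource_providers"  # assumption
TOKEN = "<keystone-token>"                                      # assumption
HEADERS = {
    "X-Auth-Token": TOKEN,
    "OpenStack-API-Version": "placement 1.26",
}


def put_inventory(provider_uuid, inventories, retries=3):
    url = f"{PLACEMENT_URL}/{provider_uuid}/inventories"
    for _ in range(retries):
        # Read the current provider generation so the PUT is conditional on it.
        current = requests.get(url, headers=HEADERS).json()
        body = {
            "resource_provider_generation":
                current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer bumped the
        # generation; loop to refresh it and try again.
    raise RuntimeError("generation conflict persisted after retries")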
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1639.275681] env[63024]: DEBUG nova.network.neutron [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Updating instance_info_cache with network_info: [{"id": "f9a7bea2-256c-4de5-9b61-91fa78298f4a", "address": "fa:16:3e:23:0a:b3", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.183", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf9a7bea2-25", "ovs_interfaceid": "f9a7bea2-256c-4de5-9b61-91fa78298f4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.287290] env[63024]: DEBUG nova.scheduler.client.report [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1639.299076] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f1ac72-fdea-7d0a-daa3-fbdc5dcbfd62, 'name': SearchDatastore_Task, 'duration_secs': 0.014378} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.299395] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1639.299706] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 85d6db13-d317-498e-a36a-972e9b36e82b/85d6db13-d317-498e-a36a-972e9b36e82b.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1639.300454] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1639.300716] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1639.301068] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c09fc90e-c0e4-4f97-bf34-a10f8f07fd3a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.303523] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22574de9-2ec6-46a1-82d8-b8a4c92dc9f2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.311722] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1639.311722] env[63024]: value = "task-1950570" [ 1639.311722] env[63024]: _type = "Task" [ 1639.311722] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.312799] env[63024]: DEBUG nova.scheduler.client.report [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1639.325459] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950570, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.526871] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950566, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.605502] env[63024]: DEBUG nova.compute.manager [req-e7ddc6ee-84af-4e7a-9cd2-751dd6b98fc6 req-2e22a0ee-245c-40cb-ba9e-0cd35e7dcb0a service nova] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Received event network-vif-deleted-bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1639.605732] env[63024]: INFO nova.compute.manager [req-e7ddc6ee-84af-4e7a-9cd2-751dd6b98fc6 req-2e22a0ee-245c-40cb-ba9e-0cd35e7dcb0a service nova] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Neutron deleted interface bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8; detaching it from the instance and deleting it from the info cache [ 1639.605915] env[63024]: DEBUG nova.network.neutron [req-e7ddc6ee-84af-4e7a-9cd2-751dd6b98fc6 req-2e22a0ee-245c-40cb-ba9e-0cd35e7dcb0a service nova] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.620099] env[63024]: DEBUG nova.network.neutron [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1639.699839] env[63024]: DEBUG oslo_vmware.api [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950569, 'name': PowerOffVM_Task, 'duration_secs': 0.36076} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.700423] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1639.700605] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1639.700894] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6f88c720-e8a3-4bdc-9a7a-b86506a8557c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.766222] env[63024]: DEBUG oslo_vmware.api [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950564, 'name': PowerOffVM_Task, 'duration_secs': 1.156837} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1639.769472] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1639.769836] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1639.770504] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b21c8e1c-8ee3-4617-960e-5bbac705576f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.781029] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1639.781254] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1639.781434] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Deleting the datastore file [datastore1] e3c9e9de-586d-4baa-b4bb-95c41d527a03 {{(pid=63024) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1639.781688] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b096142-2e58-4eb6-bfc2-335f7c633dc6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.792662] env[63024]: DEBUG oslo_concurrency.lockutils [req-914868e1-be4e-4c0e-a17a-ba724932fde9 req-e04eed5a-e832-46d6-b90b-6a560b310086 service nova] Releasing lock "refresh_cache-85d6db13-d317-498e-a36a-972e9b36e82b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1639.798794] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1639.799222] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1639.799318] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Deleting the datastore file [datastore1] 726d9639-1ab4-46a9-975e-5580c8344a37 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1639.801037] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bfa0c52d-97e2-42e2-8195-cd5af248e9e0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.803735] env[63024]: DEBUG oslo_vmware.api [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Waiting for the task: (returnval){ [ 1639.803735] env[63024]: value = "task-1950573" [ 1639.803735] env[63024]: _type = "Task" [ 1639.803735] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.813406] env[63024]: DEBUG oslo_vmware.api [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for the task: (returnval){ [ 1639.813406] env[63024]: value = "task-1950574" [ 1639.813406] env[63024]: _type = "Task" [ 1639.813406] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1639.822448] env[63024]: DEBUG oslo_vmware.api [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950573, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.825641] env[63024]: DEBUG nova.network.neutron [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Updating instance_info_cache with network_info: [{"id": "c1430120-4c82-424a-8155-f1e22eb4a7ae", "address": "fa:16:3e:57:65:47", "network": {"id": "19563e44-3f43-4aee-8e65-abfb07528a6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-310580499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5d51c3beec44aecb65ba72dffb42d40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1430120-4c", "ovs_interfaceid": "c1430120-4c82-424a-8155-f1e22eb4a7ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.834764] env[63024]: DEBUG nova.network.neutron [-] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1639.835582] env[63024]: DEBUG oslo_vmware.api [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950574, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.838832] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950570, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1639.845146] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6304bdd7-5207-4dd6-b1b7-02c183527f82 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.853442] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95a9e472-0e5b-4306-bc61-0790eb04f26c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.883993] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-385d06c7-2202-450a-972e-fb313002e83d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.891549] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd26464-d294-427d-b36f-fca6c113de58 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1639.908237] env[63024]: DEBUG nova.compute.provider_tree [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1640.022149] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950566, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.109745] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9be706ef-1565-4603-831d-f2595934745a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.118055] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-258ae85b-97da-45c6-a71e-e1efb0f4e5f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.155614] env[63024]: DEBUG nova.compute.manager [req-e7ddc6ee-84af-4e7a-9cd2-751dd6b98fc6 req-2e22a0ee-245c-40cb-ba9e-0cd35e7dcb0a service nova] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Detach interface failed, port_id=bd3ccdb3-eda1-48fc-9c1d-6e0e7820bcb8, reason: Instance 17e1dfa2-b104-4aac-928e-6364da155c3d could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1640.204574] env[63024]: DEBUG oslo_concurrency.lockutils [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Acquiring lock "22ef5bae-f7bc-43c7-9d77-1b4547e83b24" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.204574] env[63024]: DEBUG oslo_concurrency.lockutils [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lock "22ef5bae-f7bc-43c7-9d77-1b4547e83b24" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.204574] env[63024]: DEBUG oslo_concurrency.lockutils [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Acquiring lock "22ef5bae-f7bc-43c7-9d77-1b4547e83b24-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.204574] env[63024]: DEBUG oslo_concurrency.lockutils [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lock "22ef5bae-f7bc-43c7-9d77-1b4547e83b24-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.204967] env[63024]: DEBUG oslo_concurrency.lockutils [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lock "22ef5bae-f7bc-43c7-9d77-1b4547e83b24-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.207215] env[63024]: INFO nova.compute.manager [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Terminating instance [ 1640.317025] env[63024]: DEBUG oslo_vmware.api [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950573, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.324927] env[63024]: DEBUG oslo_vmware.api [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950574, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.332218] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Releasing lock "refresh_cache-b7f26f0e-d5a9-42a6-8af2-065659f89cf5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1640.332511] env[63024]: DEBUG nova.compute.manager [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Instance network_info: |[{"id": "c1430120-4c82-424a-8155-f1e22eb4a7ae", "address": "fa:16:3e:57:65:47", "network": {"id": "19563e44-3f43-4aee-8e65-abfb07528a6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-310580499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5d51c3beec44aecb65ba72dffb42d40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1430120-4c", "ovs_interfaceid": "c1430120-4c82-424a-8155-f1e22eb4a7ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1640.332771] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950570, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.333116] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:65:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '03ac2c9c-6ad2-4a85-bfab-c7e336df859a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c1430120-4c82-424a-8155-f1e22eb4a7ae', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1640.340381] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Creating folder: Project (a5d51c3beec44aecb65ba72dffb42d40). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1640.340806] env[63024]: INFO nova.compute.manager [-] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Took 1.47 seconds to deallocate network for instance. [ 1640.341032] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b17f0d1-9238-4063-a05a-a7cb309e11eb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.353743] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Created folder: Project (a5d51c3beec44aecb65ba72dffb42d40) in parent group-v401959. [ 1640.354551] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Creating folder: Instances. Parent ref: group-v402034. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1640.354551] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4ad8fc34-10df-4866-9fac-61fb1d2bde92 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.364220] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Created folder: Instances in parent group-v402034. [ 1640.364581] env[63024]: DEBUG oslo.service.loopingcall [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1640.364923] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1640.365167] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1847f9ea-1578-4386-9698-60f540bdcb82 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.396061] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1640.396061] env[63024]: value = "task-1950577" [ 1640.396061] env[63024]: _type = "Task" [ 1640.396061] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.409212] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950577, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.453321] env[63024]: DEBUG nova.scheduler.client.report [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 48 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1640.453579] env[63024]: DEBUG nova.compute.provider_tree [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 48 to 49 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1640.453762] env[63024]: DEBUG nova.compute.provider_tree [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1640.504830] env[63024]: DEBUG nova.compute.manager [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Received event network-vif-plugged-c1430120-4c82-424a-8155-f1e22eb4a7ae {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1640.505072] env[63024]: DEBUG oslo_concurrency.lockutils [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] Acquiring lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.505300] env[63024]: DEBUG oslo_concurrency.lockutils [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.505486] env[63024]: DEBUG oslo_concurrency.lockutils [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.505598] env[63024]: DEBUG nova.compute.manager [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] No waiting events found dispatching network-vif-plugged-c1430120-4c82-424a-8155-f1e22eb4a7ae {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1640.505764] env[63024]: WARNING nova.compute.manager [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Received unexpected event network-vif-plugged-c1430120-4c82-424a-8155-f1e22eb4a7ae for instance with vm_state building and task_state spawning. [ 1640.506135] env[63024]: DEBUG nova.compute.manager [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Received event network-changed-c1430120-4c82-424a-8155-f1e22eb4a7ae {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1640.506135] env[63024]: DEBUG nova.compute.manager [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Refreshing instance network info cache due to event network-changed-c1430120-4c82-424a-8155-f1e22eb4a7ae. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1640.506362] env[63024]: DEBUG oslo_concurrency.lockutils [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] Acquiring lock "refresh_cache-b7f26f0e-d5a9-42a6-8af2-065659f89cf5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1640.506461] env[63024]: DEBUG oslo_concurrency.lockutils [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] Acquired lock "refresh_cache-b7f26f0e-d5a9-42a6-8af2-065659f89cf5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1640.506730] env[63024]: DEBUG nova.network.neutron [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Refreshing network info cache for port c1430120-4c82-424a-8155-f1e22eb4a7ae {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1640.523101] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950566, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.637837] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1640.637837] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1640.638930] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f3cc740-784f-4724-b0a9-14f4844506a5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.648827] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1640.648827] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521b9e74-8568-d9cf-ef25-c4fc00aed76c" [ 1640.648827] env[63024]: _type = "Task" [ 1640.648827] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.659881] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521b9e74-8568-d9cf-ef25-c4fc00aed76c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.713844] env[63024]: DEBUG nova.compute.manager [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1640.714096] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1640.715069] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64137f6e-bce1-4448-9039-c2c62d6a9a94 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.722747] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1640.723102] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31e0fb98-3f75-4921-a616-b7400276c250 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1640.731969] env[63024]: DEBUG oslo_vmware.api [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1640.731969] env[63024]: value = "task-1950578" [ 1640.731969] env[63024]: _type = "Task" [ 1640.731969] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1640.743714] env[63024]: DEBUG oslo_vmware.api [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950578, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.820902] env[63024]: DEBUG oslo_vmware.api [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Task: {'id': task-1950573, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.010901} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.824134] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1640.824338] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1640.824514] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1640.824706] env[63024]: INFO nova.compute.manager [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1640.824955] env[63024]: DEBUG oslo.service.loopingcall [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1640.825258] env[63024]: DEBUG nova.compute.manager [-] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1640.825356] env[63024]: DEBUG nova.network.neutron [-] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1640.834658] env[63024]: DEBUG oslo_vmware.api [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Task: {'id': task-1950574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.932325} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1640.835304] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1640.835517] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1640.836401] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1640.836401] env[63024]: INFO nova.compute.manager [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Took 2.62 seconds to destroy the instance on the hypervisor. [ 1640.836401] env[63024]: DEBUG oslo.service.loopingcall [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1640.836401] env[63024]: DEBUG nova.compute.manager [-] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1640.836574] env[63024]: DEBUG nova.network.neutron [-] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1640.841167] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950570, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.848721] env[63024]: DEBUG oslo_concurrency.lockutils [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1640.875746] env[63024]: DEBUG nova.network.neutron [-] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1640.905460] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950577, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1640.958878] env[63024]: DEBUG oslo_concurrency.lockutils [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.544s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1640.961712] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.356s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1640.963360] env[63024]: INFO nova.compute.claims [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1640.992724] env[63024]: INFO nova.scheduler.client.report [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Deleted allocations for instance e03b8577-9298-4e88-98ea-6258e97db28d [ 1641.027142] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950566, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.162757] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521b9e74-8568-d9cf-ef25-c4fc00aed76c, 'name': SearchDatastore_Task, 'duration_secs': 0.012332} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.163830] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-232f1898-e2d1-4e54-b482-5a39baeccce7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.172332] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1641.172332] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520de2d4-33cd-b33b-8834-226006a18d1c" [ 1641.172332] env[63024]: _type = "Task" [ 1641.172332] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.181474] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520de2d4-33cd-b33b-8834-226006a18d1c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.246378] env[63024]: DEBUG oslo_vmware.api [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950578, 'name': PowerOffVM_Task, 'duration_secs': 0.244818} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.246693] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1641.246900] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1641.247197] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92e6d75e-5b51-42f3-b863-bf5ed62f17b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.336186] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950570, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.979556} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.336487] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 85d6db13-d317-498e-a36a-972e9b36e82b/85d6db13-d317-498e-a36a-972e9b36e82b.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1641.336726] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1641.337052] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97c7ffe8-e96f-482b-adaf-5d2336d30f53 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.346018] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1641.346018] env[63024]: value = "task-1950580" [ 1641.346018] env[63024]: _type = "Task" [ 1641.346018] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.355532] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950580, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.379066] env[63024]: DEBUG nova.network.neutron [-] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.409350] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950577, 'name': CreateVM_Task, 'duration_secs': 0.545639} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.409580] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1641.410380] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.410619] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.411028] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1641.411292] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85bce532-ec6f-46d9-a6a7-69e67554eaad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.418010] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1641.418010] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d5db3b-4c4e-63d1-8638-9f4f543e3790" [ 1641.418010] env[63024]: _type = "Task" [ 1641.418010] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.430776] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d5db3b-4c4e-63d1-8638-9f4f543e3790, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.442645] env[63024]: DEBUG nova.network.neutron [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Updated VIF entry in instance network info cache for port c1430120-4c82-424a-8155-f1e22eb4a7ae. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1641.443016] env[63024]: DEBUG nova.network.neutron [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Updating instance_info_cache with network_info: [{"id": "c1430120-4c82-424a-8155-f1e22eb4a7ae", "address": "fa:16:3e:57:65:47", "network": {"id": "19563e44-3f43-4aee-8e65-abfb07528a6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-310580499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5d51c3beec44aecb65ba72dffb42d40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1430120-4c", "ovs_interfaceid": "c1430120-4c82-424a-8155-f1e22eb4a7ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.464680] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1641.464876] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1641.465139] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Deleting the datastore file [datastore1] 22ef5bae-f7bc-43c7-9d77-1b4547e83b24 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1641.465752] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-01d063e1-1ac5-4aa1-b782-5607ada56f6d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.474808] env[63024]: DEBUG oslo_vmware.api [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for the task: (returnval){ [ 1641.474808] env[63024]: value = "task-1950581" [ 1641.474808] env[63024]: _type = "Task" [ 1641.474808] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.483796] env[63024]: DEBUG oslo_vmware.api [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950581, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.507452] env[63024]: DEBUG oslo_concurrency.lockutils [None req-57b2ef90-159e-4c66-a16d-da3832e84635 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037 tempest-FloatingIPsAssociationNegativeTestJSON-1687429037-project-member] Lock "e03b8577-9298-4e88-98ea-6258e97db28d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.449s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1641.527779] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950566, 'name': CloneVM_Task} progress is 95%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.623497] env[63024]: DEBUG nova.network.neutron [-] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1641.682093] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520de2d4-33cd-b33b-8834-226006a18d1c, 'name': SearchDatastore_Task, 'duration_secs': 0.055401} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.682460] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.682762] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] d49eae54-cccb-4281-aaa0-d6974529eb7b/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk. 
{{(pid=63024) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1641.683085] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a4c544bf-132e-41eb-9a0d-aea686e61173 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.690770] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1641.690770] env[63024]: value = "task-1950582" [ 1641.690770] env[63024]: _type = "Task" [ 1641.690770] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.699053] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950582, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.825272] env[63024]: DEBUG nova.compute.manager [req-d46dfb79-3f5e-4390-addb-4f420c4a8ae2 req-f3595a68-09d5-4f95-b677-5306d98c6521 service nova] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Received event network-vif-deleted-246e1d4e-5ecf-48af-aca8-d7ee68ab39c7 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1641.856662] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950580, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066271} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.857065] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1641.858018] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847be571-d6ab-4110-b34c-8da76e836978 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.883067] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Reconfiguring VM instance instance-00000018 to attach disk [datastore1] 85d6db13-d317-498e-a36a-972e9b36e82b/85d6db13-d317-498e-a36a-972e9b36e82b.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1641.883499] env[63024]: INFO nova.compute.manager [-] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Took 1.05 seconds to deallocate network for instance. 
[ 1641.883742] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da5e3870-b579-4b2c-a267-bcf8b9e2d167 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.908681] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1641.908681] env[63024]: value = "task-1950583" [ 1641.908681] env[63024]: _type = "Task" [ 1641.908681] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.917996] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950583, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.929286] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d5db3b-4c4e-63d1-8638-9f4f543e3790, 'name': SearchDatastore_Task, 'duration_secs': 0.014501} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.929713] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.930049] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1641.930500] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1641.930725] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1641.930998] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1641.931355] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bbef8ef-5318-4eb0-b685-54f76f83bda7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.942983] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1641.943251] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1641.944735] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfff3753-222e-4d98-9725-6544cbd3726e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1641.947386] env[63024]: DEBUG oslo_concurrency.lockutils [req-2c0dca25-0ef2-496e-a321-175aa4473fc5 req-eeb52b57-6c48-4a40-8788-da005eeeff6c service nova] Releasing lock "refresh_cache-b7f26f0e-d5a9-42a6-8af2-065659f89cf5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1641.951807] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1641.951807] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cba7df-b719-48aa-498e-dd0c8f54e97f" [ 1641.951807] env[63024]: _type = "Task" [ 1641.951807] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1641.961367] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cba7df-b719-48aa-498e-dd0c8f54e97f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1641.989487] env[63024]: DEBUG oslo_vmware.api [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Task: {'id': task-1950581, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180409} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1641.989777] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1641.989898] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1641.990128] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1641.990446] env[63024]: INFO nova.compute.manager [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Took 1.28 seconds to destroy the instance on the hypervisor. [ 1641.990689] env[63024]: DEBUG oslo.service.loopingcall [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1641.990958] env[63024]: DEBUG nova.compute.manager [-] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1641.991139] env[63024]: DEBUG nova.network.neutron [-] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1642.025807] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950566, 'name': CloneVM_Task, 'duration_secs': 3.280551} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.028854] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Created linked-clone VM from snapshot [ 1642.029903] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a7005f-f67b-4ea4-a6fb-adddeee6cf62 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.037890] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Uploading image 369dafe1-0a7e-4da6-8181-df656ccea797 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1642.068246] env[63024]: DEBUG oslo_vmware.rw_handles [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1642.068246] env[63024]: value = "vm-402033" [ 1642.068246] env[63024]: _type = "VirtualMachine" [ 1642.068246] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1642.068626] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-46ba98bf-faba-4590-aa2e-7b6da9771ab1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.078064] env[63024]: DEBUG oslo_vmware.rw_handles [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lease: (returnval){ [ 1642.078064] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526e4ee1-2673-4da7-6a54-3c85b14dbd40" [ 1642.078064] env[63024]: _type = "HttpNfcLease" [ 1642.078064] env[63024]: } obtained for exporting VM: (result){ [ 1642.078064] env[63024]: value = "vm-402033" [ 1642.078064] env[63024]: _type = "VirtualMachine" [ 1642.078064] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1642.078391] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the lease: (returnval){ [ 1642.078391] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526e4ee1-2673-4da7-6a54-3c85b14dbd40" [ 1642.078391] env[63024]: _type = "HttpNfcLease" [ 1642.078391] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1642.085788] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1642.085788] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526e4ee1-2673-4da7-6a54-3c85b14dbd40" [ 1642.085788] env[63024]: _type = "HttpNfcLease" [ 1642.085788] env[63024]: } is initializing. 
{{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1642.126812] env[63024]: INFO nova.compute.manager [-] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Took 1.30 seconds to deallocate network for instance. [ 1642.206585] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950582, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.409124] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.422919] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950583, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.463138] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cba7df-b719-48aa-498e-dd0c8f54e97f, 'name': SearchDatastore_Task, 'duration_secs': 0.017258} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.464027] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f960ee8e-9681-43c9-a565-66809b3e5bb2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.473787] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1642.473787] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5215710f-d729-6bdb-fcd4-69e297f6cd84" [ 1642.473787] env[63024]: _type = "Task" [ 1642.473787] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.481661] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5215710f-d729-6bdb-fcd4-69e297f6cd84, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.499081] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ffa6e33-69f3-41cb-802e-85fe54ab7cdb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.507846] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae992897-1af2-4bec-9a93-e079d11e3dc9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.546195] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fc2a37-e67b-4e95-ba1d-7c21cc4c6851 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.550908] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a848b0fb-bd59-4f66-824a-47a83f692805 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.567593] env[63024]: DEBUG nova.compute.provider_tree [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1642.589064] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1642.589064] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526e4ee1-2673-4da7-6a54-3c85b14dbd40" [ 1642.589064] env[63024]: _type = "HttpNfcLease" [ 1642.589064] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1642.589362] env[63024]: DEBUG oslo_vmware.rw_handles [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1642.589362] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526e4ee1-2673-4da7-6a54-3c85b14dbd40" [ 1642.589362] env[63024]: _type = "HttpNfcLease" [ 1642.589362] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1642.590122] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6b99559-f9fd-483c-b87a-b0f0fb80ddd6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.600411] env[63024]: DEBUG oslo_vmware.rw_handles [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e30721-7715-c62f-8d8e-f148e51574f2/disk-0.vmdk from lease info. 
{{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1642.600624] env[63024]: DEBUG oslo_vmware.rw_handles [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e30721-7715-c62f-8d8e-f148e51574f2/disk-0.vmdk for reading. {{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1642.668120] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1642.701978] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950582, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.734851} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1642.702379] env[63024]: INFO nova.virt.vmwareapi.ds_util [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] d49eae54-cccb-4281-aaa0-d6974529eb7b/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk. [ 1642.703107] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188d1ef1-1041-481d-8b53-43b8a7136fea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.727977] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] d49eae54-cccb-4281-aaa0-d6974529eb7b/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1642.728316] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-032921ac-4d9e-44d7-92d4-b8e336783645 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.746409] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-21ae6331-6521-4802-9b6c-bb636922365c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1642.749491] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1642.749491] env[63024]: value = "task-1950585" [ 1642.749491] env[63024]: _type = "Task" [ 1642.749491] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1642.761176] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950585, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.922522] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950583, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1642.974142] env[63024]: DEBUG nova.compute.manager [req-1cf4271c-9e64-449f-810c-70eefb5d92e6 req-22fdeab9-4647-450f-bca9-f707c047aabd service nova] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Received event network-vif-deleted-7744ae27-9eae-4bcd-b5d8-425150caba4f {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1642.974566] env[63024]: INFO nova.compute.manager [req-1cf4271c-9e64-449f-810c-70eefb5d92e6 req-22fdeab9-4647-450f-bca9-f707c047aabd service nova] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Neutron deleted interface 7744ae27-9eae-4bcd-b5d8-425150caba4f; detaching it from the instance and deleting it from the info cache [ 1642.974566] env[63024]: DEBUG nova.network.neutron [req-1cf4271c-9e64-449f-810c-70eefb5d92e6 req-22fdeab9-4647-450f-bca9-f707c047aabd service nova] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1642.997271] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5215710f-d729-6bdb-fcd4-69e297f6cd84, 'name': SearchDatastore_Task, 'duration_secs': 0.01847} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.001170] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1643.001615] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b7f26f0e-d5a9-42a6-8af2-065659f89cf5/b7f26f0e-d5a9-42a6-8af2-065659f89cf5.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1643.002354] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-96069e0c-1369-43b3-b8f6-5ea6541e22c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.014489] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1643.014489] env[63024]: value = "task-1950586" [ 1643.014489] env[63024]: _type = "Task" [ 1643.014489] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.024741] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950586, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.074715] env[63024]: DEBUG nova.scheduler.client.report [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1643.265121] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950585, 'name': ReconfigVM_Task, 'duration_secs': 0.462899} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.265121] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Reconfigured VM instance instance-00000016 to attach disk [datastore1] d49eae54-cccb-4281-aaa0-d6974529eb7b/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1643.265121] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d991e4b-362d-4f38-9564-c25f984fc2ed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.305077] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0119cbe-b5ca-4100-bdb1-e249ce3a5e45 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.317758] env[63024]: DEBUG nova.network.neutron [-] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1643.325419] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1643.325419] env[63024]: value = "task-1950587" [ 1643.325419] env[63024]: _type = "Task" [ 1643.325419] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.334509] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950587, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.427111] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950583, 'name': ReconfigVM_Task, 'duration_secs': 1.333579} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.427765] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Reconfigured VM instance instance-00000018 to attach disk [datastore1] 85d6db13-d317-498e-a36a-972e9b36e82b/85d6db13-d317-498e-a36a-972e9b36e82b.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1643.429027] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73ddb3fc-6715-420d-80e5-6de46e5ebb6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.437547] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1643.437547] env[63024]: value = "task-1950588" [ 1643.437547] env[63024]: _type = "Task" [ 1643.437547] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.451716] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950588, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.479518] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9bf73eb4-2384-42ed-a2f1-e11e10fa912f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.492195] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df9819c-85b4-4f87-b7c8-e36c0d25633b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.528547] env[63024]: DEBUG nova.compute.manager [req-1cf4271c-9e64-449f-810c-70eefb5d92e6 req-22fdeab9-4647-450f-bca9-f707c047aabd service nova] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Detach interface failed, port_id=7744ae27-9eae-4bcd-b5d8-425150caba4f, reason: Instance 22ef5bae-f7bc-43c7-9d77-1b4547e83b24 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1643.539143] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950586, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.583382] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.622s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1643.584044] env[63024]: DEBUG nova.compute.manager [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1643.587409] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.430s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1643.589267] env[63024]: INFO nova.compute.claims [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1643.821245] env[63024]: INFO nova.compute.manager [-] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Took 1.83 seconds to deallocate network for instance. [ 1643.837607] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950587, 'name': ReconfigVM_Task, 'duration_secs': 0.491336} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.837607] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1643.837607] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c7a9d03-8753-4c08-be26-5a3cab99504f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.848315] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1643.848315] env[63024]: value = "task-1950589" [ 1643.848315] env[63024]: _type = "Task" [ 1643.848315] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.855365] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950589, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1643.949237] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950588, 'name': Rename_Task, 'duration_secs': 0.313109} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1643.949870] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1643.951865] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7c99f99a-6706-46f1-b5e3-b5fb32b28ad6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.958016] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1643.958016] env[63024]: value = "task-1950590" [ 1643.958016] env[63024]: _type = "Task" [ 1643.958016] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.968634] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950590, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.038942] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950586, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.691079} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.043758] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b7f26f0e-d5a9-42a6-8af2-065659f89cf5/b7f26f0e-d5a9-42a6-8af2-065659f89cf5.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1644.044187] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1644.044559] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-02b7e73f-99be-407e-a2e7-d6bac4fa25a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.052138] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1644.052138] env[63024]: value = "task-1950591" [ 1644.052138] env[63024]: _type = "Task" [ 1644.052138] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.063016] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950591, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.099427] env[63024]: DEBUG nova.compute.utils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1644.104627] env[63024]: DEBUG nova.compute.manager [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1644.104627] env[63024]: DEBUG nova.network.neutron [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1644.297287] env[63024]: DEBUG nova.policy [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62964d784b2f4b3fbd8b869ab299eb7a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c720cdab04804a8390b825059692c3f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1644.329069] env[63024]: DEBUG oslo_concurrency.lockutils [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1644.359269] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950589, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.470914] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950590, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.564183] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950591, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06498} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.564693] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1644.565680] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287e6c07-b901-4a96-896a-c3da0f850c19 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.600405] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] b7f26f0e-d5a9-42a6-8af2-065659f89cf5/b7f26f0e-d5a9-42a6-8af2-065659f89cf5.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1644.600795] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee9c3ec2-8588-4c3d-9eea-c57843dbaede {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.619114] env[63024]: DEBUG nova.compute.manager [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1644.630973] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1644.630973] env[63024]: value = "task-1950592" [ 1644.630973] env[63024]: _type = "Task" [ 1644.630973] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.639930] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950592, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.859763] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950589, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.889109] env[63024]: DEBUG nova.network.neutron [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Successfully created port: 72b9a842-8d9f-4bc9-945a-1b144bf0e58c {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1644.971781] env[63024]: DEBUG oslo_vmware.api [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950590, 'name': PowerOnVM_Task, 'duration_secs': 0.537943} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.972100] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1644.972272] env[63024]: INFO nova.compute.manager [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Took 10.38 seconds to spawn the instance on the hypervisor. [ 1644.972448] env[63024]: DEBUG nova.compute.manager [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1644.975818] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8505ac43-2a82-44d2-b757-0fa66ce66443 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.146757] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950592, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.175890] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6519016d-a802-415d-b3c8-85c8c4f284e6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.184861] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aad8c9e-7d69-479d-bca1-3fb8d5561a7b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.228049] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a81d428-e4c9-4f33-be77-e431334c5281 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.236971] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b91edf-9029-406b-8fcd-a507e2d2bccf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.251541] env[63024]: DEBUG nova.compute.provider_tree [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1645.360772] env[63024]: DEBUG oslo_vmware.api [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950589, 'name': PowerOnVM_Task, 'duration_secs': 1.390643} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.361159] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1645.363991] env[63024]: DEBUG nova.compute.manager [None req-d4954c98-f22f-4440-b12e-0d024767a8de tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1645.364971] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb6a565c-3ca7-406d-a020-8e21499bc6ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.497641] env[63024]: INFO nova.compute.manager [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Took 39.32 seconds to build instance. [ 1645.637126] env[63024]: DEBUG nova.compute.manager [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1645.654025] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950592, 'name': ReconfigVM_Task, 'duration_secs': 0.584714} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1645.654324] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Reconfigured VM instance instance-00000019 to attach disk [datastore1] b7f26f0e-d5a9-42a6-8af2-065659f89cf5/b7f26f0e-d5a9-42a6-8af2-065659f89cf5.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1645.655813] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-abbb969c-5a57-4781-9bb3-fef6fc4d9d3e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.663065] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1645.663065] env[63024]: value = "task-1950593" [ 1645.663065] env[63024]: _type = "Task" [ 1645.663065] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1645.668350] env[63024]: DEBUG nova.virt.hardware [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1645.668586] env[63024]: DEBUG nova.virt.hardware [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1645.668855] env[63024]: DEBUG nova.virt.hardware [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1645.669071] env[63024]: DEBUG nova.virt.hardware [None 
req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1645.669225] env[63024]: DEBUG nova.virt.hardware [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1645.669370] env[63024]: DEBUG nova.virt.hardware [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1645.669576] env[63024]: DEBUG nova.virt.hardware [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1645.669764] env[63024]: DEBUG nova.virt.hardware [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1645.669936] env[63024]: DEBUG nova.virt.hardware [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1645.670117] env[63024]: DEBUG nova.virt.hardware [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1645.670290] env[63024]: DEBUG nova.virt.hardware [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1645.671151] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2094ab-d8fd-427d-840b-b58e82d94a83 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.683127] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-029d64df-209c-4379-bf03-aca2a6181268 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.687753] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950593, 'name': 
Rename_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1645.757613] env[63024]: DEBUG nova.scheduler.client.report [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1645.999178] env[63024]: DEBUG oslo_concurrency.lockutils [None req-704c2017-2beb-4aac-8bf7-f4066d6f5da0 tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "85d6db13-d317-498e-a36a-972e9b36e82b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.349s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.181194] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950593, 'name': Rename_Task, 'duration_secs': 0.201546} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1646.181535] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1646.181751] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d0df1d4-09bd-40b5-bbdc-431e24404537 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.192899] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1646.192899] env[63024]: value = "task-1950594" [ 1646.192899] env[63024]: _type = "Task" [ 1646.192899] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.209741] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950594, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.262197] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.675s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.262754] env[63024]: DEBUG nova.compute.manager [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1646.266623] env[63024]: DEBUG oslo_concurrency.lockutils [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.074s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.266623] env[63024]: DEBUG nova.objects.instance [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63024) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1646.316922] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "85d6db13-d317-498e-a36a-972e9b36e82b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.317589] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "85d6db13-d317-498e-a36a-972e9b36e82b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.317851] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "85d6db13-d317-498e-a36a-972e9b36e82b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1646.318076] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "85d6db13-d317-498e-a36a-972e9b36e82b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1646.319966] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "85d6db13-d317-498e-a36a-972e9b36e82b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1646.323187] env[63024]: INFO nova.compute.manager [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Terminating instance [ 1646.502330] env[63024]: DEBUG nova.compute.manager [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1646.712511] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950594, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.769736] env[63024]: DEBUG nova.compute.utils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1646.769736] env[63024]: DEBUG nova.compute.manager [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1646.769736] env[63024]: DEBUG nova.network.neutron [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1646.833331] env[63024]: DEBUG nova.compute.manager [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1646.833331] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1646.833331] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22487c4a-eb20-4ee0-8a51-918abf87533c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.841717] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1646.842173] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c72d9e3-fff8-45cf-8fb8-2c0a7ddcbba8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.850325] env[63024]: DEBUG oslo_vmware.api [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1646.850325] env[63024]: value = "task-1950595" [ 1646.850325] env[63024]: _type = "Task" [ 1646.850325] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1646.860150] env[63024]: DEBUG oslo_vmware.api [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950595, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1646.882935] env[63024]: INFO nova.compute.manager [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Unrescuing [ 1646.883451] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "refresh_cache-d49eae54-cccb-4281-aaa0-d6974529eb7b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1646.883721] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquired lock "refresh_cache-d49eae54-cccb-4281-aaa0-d6974529eb7b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1646.883996] env[63024]: DEBUG nova.network.neutron [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1646.897474] env[63024]: DEBUG nova.policy [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2cc094a0a6b444ab1880fcfb1de4e8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bbfeec6d47746328f185acd132e0d5a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1647.037437] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.039694] env[63024]: DEBUG nova.compute.manager [req-44d6ede9-2c02-4625-b092-d5ca1aaa6904 req-f80381ff-7088-466f-8aaf-639cc1f32773 service nova] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Received event network-vif-plugged-72b9a842-8d9f-4bc9-945a-1b144bf0e58c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1647.039911] env[63024]: DEBUG oslo_concurrency.lockutils [req-44d6ede9-2c02-4625-b092-d5ca1aaa6904 req-f80381ff-7088-466f-8aaf-639cc1f32773 service nova] Acquiring lock "ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1647.040920] env[63024]: DEBUG oslo_concurrency.lockutils [req-44d6ede9-2c02-4625-b092-d5ca1aaa6904 req-f80381ff-7088-466f-8aaf-639cc1f32773 service nova] Lock 
"ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.041180] env[63024]: DEBUG oslo_concurrency.lockutils [req-44d6ede9-2c02-4625-b092-d5ca1aaa6904 req-f80381ff-7088-466f-8aaf-639cc1f32773 service nova] Lock "ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.041415] env[63024]: DEBUG nova.compute.manager [req-44d6ede9-2c02-4625-b092-d5ca1aaa6904 req-f80381ff-7088-466f-8aaf-639cc1f32773 service nova] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] No waiting events found dispatching network-vif-plugged-72b9a842-8d9f-4bc9-945a-1b144bf0e58c {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1647.041652] env[63024]: WARNING nova.compute.manager [req-44d6ede9-2c02-4625-b092-d5ca1aaa6904 req-f80381ff-7088-466f-8aaf-639cc1f32773 service nova] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Received unexpected event network-vif-plugged-72b9a842-8d9f-4bc9-945a-1b144bf0e58c for instance with vm_state building and task_state spawning. [ 1647.210088] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950594, 'name': PowerOnVM_Task} progress is 64%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.252157] env[63024]: DEBUG nova.network.neutron [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Successfully updated port: 72b9a842-8d9f-4bc9-945a-1b144bf0e58c {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1647.274013] env[63024]: DEBUG nova.compute.manager [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1647.282391] env[63024]: DEBUG oslo_concurrency.lockutils [None req-739f9504-91e5-452f-84b5-84e8b8525a3b tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1647.287740] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.021s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1647.289031] env[63024]: INFO nova.compute.claims [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1647.364832] env[63024]: DEBUG oslo_vmware.api [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950595, 'name': PowerOffVM_Task, 'duration_secs': 0.264374} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.364832] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1647.364832] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1647.368097] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f85a872-d62d-404b-89e6-e86cb2524f9d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.444306] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1647.444306] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1647.444444] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f 
tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Deleting the datastore file [datastore1] 85d6db13-d317-498e-a36a-972e9b36e82b {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1647.445046] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fc8a6d4e-26da-4382-85d3-9246dd1f2991 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.453735] env[63024]: DEBUG oslo_vmware.api [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for the task: (returnval){ [ 1647.453735] env[63024]: value = "task-1950597" [ 1647.453735] env[63024]: _type = "Task" [ 1647.453735] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.464225] env[63024]: DEBUG oslo_vmware.api [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950597, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.600283] env[63024]: DEBUG nova.network.neutron [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Successfully created port: 209c547a-fef6-4e81-9221-59b72099faa5 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1647.713537] env[63024]: DEBUG oslo_vmware.api [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950594, 'name': PowerOnVM_Task, 'duration_secs': 1.393148} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.716108] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1647.716350] env[63024]: INFO nova.compute.manager [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Took 10.48 seconds to spawn the instance on the hypervisor. 
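The task records in this stretch of the log all follow one polling pattern: a vCenter task is invoked (PowerOnVM_Task, ReconfigVM_Task, Rename_Task, UnregisterVM, DeleteDatastoreFile_Task), oslo_vmware.api repeatedly polls it and logs "progress is N%", and a final record reports duration_secs once the task completed successfully. A minimal sketch of such a poll loop, assuming a hypothetical get_task_info() callable standing in for the vSphere API call that oslo.vmware wraps (this is an illustration of the pattern, not the actual oslo.vmware implementation):

    import time

    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        """Poll a task until it finishes; return its result or raise."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            # e.g. {'state': 'running', 'progress': 64} -- hypothetical shape
            info = get_task_info()
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            # still queued/running: sleep and poll again, which is what
            # produces the repeated "progress is N%" records above
            time.sleep(poll_interval)
        raise TimeoutError('task did not complete within %ss' % timeout)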
[ 1647.716791] env[63024]: DEBUG nova.compute.manager [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1647.717371] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-629ede16-5f9b-4f77-8a50-e7423e6b0361 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.756974] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "refresh_cache-ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1647.757138] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquired lock "refresh_cache-ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1647.757308] env[63024]: DEBUG nova.network.neutron [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1647.960287] env[63024]: DEBUG nova.network.neutron [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Updating instance_info_cache with network_info: [{"id": "776bbe97-34ad-47f3-9045-81bb3c16a126", "address": "fa:16:3e:e6:1d:cf", "network": {"id": "f42f7ff4-b2ef-45fd-8230-5f16271d5808", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-607041553-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "5489a064ee1d44f0bd6c496f4775b9d6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "56834f67-27a8-43dc-bbc6-a74aaa08959b", "external-id": "nsx-vlan-transportzone-949", "segmentation_id": 949, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap776bbe97-34", "ovs_interfaceid": "776bbe97-34ad-47f3-9045-81bb3c16a126", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.969966] env[63024]: DEBUG oslo_vmware.api [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Task: {'id': task-1950597, 
'name': DeleteDatastoreFile_Task, 'duration_secs': 0.319365} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.970261] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1647.970846] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1647.970846] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1647.970935] env[63024]: INFO nova.compute.manager [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1647.971230] env[63024]: DEBUG oslo.service.loopingcall [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1647.971476] env[63024]: DEBUG nova.compute.manager [-] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1647.971600] env[63024]: DEBUG nova.network.neutron [-] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1648.245101] env[63024]: INFO nova.compute.manager [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Took 41.50 seconds to build instance. [ 1648.293622] env[63024]: DEBUG nova.compute.manager [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1648.329876] env[63024]: DEBUG nova.virt.hardware [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1648.330089] env[63024]: DEBUG nova.virt.hardware [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1648.330323] env[63024]: DEBUG nova.virt.hardware [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1648.332125] env[63024]: DEBUG nova.virt.hardware [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1648.332125] env[63024]: DEBUG nova.virt.hardware [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1648.332125] env[63024]: DEBUG nova.virt.hardware [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1648.332125] env[63024]: DEBUG nova.virt.hardware [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1648.332125] env[63024]: DEBUG nova.virt.hardware [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1648.332292] env[63024]: DEBUG 
nova.virt.hardware [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1648.332292] env[63024]: DEBUG nova.virt.hardware [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1648.332292] env[63024]: DEBUG nova.virt.hardware [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1648.332833] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c3c45c-2bd2-476c-81f4-8bc5770005f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.344333] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284310c4-588a-4f37-9188-5f6fdac08449 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.362587] env[63024]: DEBUG nova.network.neutron [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1648.466039] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Releasing lock "refresh_cache-d49eae54-cccb-4281-aaa0-d6974529eb7b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1648.466039] env[63024]: DEBUG nova.objects.instance [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lazy-loading 'flavor' on Instance uuid d49eae54-cccb-4281-aaa0-d6974529eb7b {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1648.749596] env[63024]: DEBUG oslo_concurrency.lockutils [None req-74ad8fa4-e997-4d3f-81de-a4370a3f6f11 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.432s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1648.788294] env[63024]: DEBUG nova.network.neutron [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Updating instance_info_cache with network_info: [{"id": "72b9a842-8d9f-4bc9-945a-1b144bf0e58c", "address": "fa:16:3e:d9:03:11", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72b9a842-8d", "ovs_interfaceid": "72b9a842-8d9f-4bc9-945a-1b144bf0e58c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1648.912543] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56dba5c3-4bbf-4fb8-bb18-50f9f8a60bc2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.922216] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0deb375c-8d49-468b-9bb6-823b55ea9197 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.961647] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfa46f75-376b-4498-872a-adc9bd5d7ba7 
{{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.971541] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e079665e-d0c6-49c9-a734-84f6007df5bd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.975264] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-491b0308-e930-49f0-8e6e-66ff2777abf1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1648.999088] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1649.010067] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2dbc8efc-9e7b-47cb-b66d-08fb24d89c3d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.011883] env[63024]: DEBUG nova.compute.provider_tree [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1649.019342] env[63024]: DEBUG oslo_vmware.api [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1649.019342] env[63024]: value = "task-1950598" [ 1649.019342] env[63024]: _type = "Task" [ 1649.019342] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.027918] env[63024]: DEBUG oslo_vmware.api [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950598, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.134405] env[63024]: DEBUG nova.network.neutron [-] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1649.230015] env[63024]: DEBUG nova.compute.manager [req-41bdb4de-26ec-439d-84f6-d5cf27a65ce3 req-078c4259-52b1-45ec-bda8-23c9444c89c0 service nova] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Received event network-changed-72b9a842-8d9f-4bc9-945a-1b144bf0e58c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1649.230264] env[63024]: DEBUG nova.compute.manager [req-41bdb4de-26ec-439d-84f6-d5cf27a65ce3 req-078c4259-52b1-45ec-bda8-23c9444c89c0 service nova] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Refreshing instance network info cache due to event network-changed-72b9a842-8d9f-4bc9-945a-1b144bf0e58c. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1649.230670] env[63024]: DEBUG oslo_concurrency.lockutils [req-41bdb4de-26ec-439d-84f6-d5cf27a65ce3 req-078c4259-52b1-45ec-bda8-23c9444c89c0 service nova] Acquiring lock "refresh_cache-ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.253272] env[63024]: DEBUG nova.compute.manager [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1649.290992] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Releasing lock "refresh_cache-ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1649.291502] env[63024]: DEBUG nova.compute.manager [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Instance network_info: |[{"id": "72b9a842-8d9f-4bc9-945a-1b144bf0e58c", "address": "fa:16:3e:d9:03:11", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72b9a842-8d", "ovs_interfaceid": "72b9a842-8d9f-4bc9-945a-1b144bf0e58c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1649.291820] env[63024]: DEBUG oslo_concurrency.lockutils [req-41bdb4de-26ec-439d-84f6-d5cf27a65ce3 req-078c4259-52b1-45ec-bda8-23c9444c89c0 service nova] Acquired lock "refresh_cache-ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.292086] env[63024]: DEBUG nova.network.neutron [req-41bdb4de-26ec-439d-84f6-d5cf27a65ce3 req-078c4259-52b1-45ec-bda8-23c9444c89c0 service nova] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Refreshing network info cache for port 72b9a842-8d9f-4bc9-945a-1b144bf0e58c {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1649.293306] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 
tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d9:03:11', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '72b9a842-8d9f-4bc9-945a-1b144bf0e58c', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1649.302434] env[63024]: DEBUG oslo.service.loopingcall [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1649.303518] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1649.303787] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6217fa5e-21b2-4348-b47f-665b232573ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.335180] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1649.335180] env[63024]: value = "task-1950599" [ 1649.335180] env[63024]: _type = "Task" [ 1649.335180] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.351267] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950599, 'name': CreateVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.515542] env[63024]: DEBUG nova.scheduler.client.report [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1649.536124] env[63024]: DEBUG oslo_vmware.api [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950598, 'name': PowerOffVM_Task, 'duration_secs': 0.282105} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.536450] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1649.543845] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Reconfiguring VM instance instance-00000016 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1649.544973] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f89ee18b-23ac-4422-bed2-cac77d4ab17c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.566956] env[63024]: DEBUG oslo_vmware.api [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1649.566956] env[63024]: value = "task-1950600" [ 1649.566956] env[63024]: _type = "Task" [ 1649.566956] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.576648] env[63024]: DEBUG oslo_vmware.api [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950600, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1649.640968] env[63024]: INFO nova.compute.manager [-] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Took 1.67 seconds to deallocate network for instance. [ 1649.787592] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1649.846140] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950599, 'name': CreateVM_Task, 'duration_secs': 0.351554} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1649.846140] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1649.846329] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1649.846627] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1649.846958] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1649.847234] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bca95b5c-3b90-4664-949e-d924c3303dca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1649.853142] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1649.853142] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52395ac5-bc34-d5ee-391c-ba99e6df3b08" [ 1649.853142] env[63024]: _type = "Task" [ 1649.853142] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1649.863847] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52395ac5-bc34-d5ee-391c-ba99e6df3b08, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.030484] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.743s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.031020] env[63024]: DEBUG nova.compute.manager [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1650.034123] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.750s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.036941] env[63024]: INFO nova.compute.claims [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1650.079503] env[63024]: DEBUG oslo_vmware.api [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950600, 'name': ReconfigVM_Task, 'duration_secs': 0.265867} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.079774] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Reconfigured VM instance instance-00000016 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1650.079986] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1650.080297] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02fc16f5-cb30-449e-a441-9e3789fc9d81 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.088647] env[63024]: DEBUG oslo_vmware.api [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1650.088647] env[63024]: value = "task-1950601" [ 1650.088647] env[63024]: _type = "Task" [ 1650.088647] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.100482] env[63024]: DEBUG oslo_vmware.api [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950601, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.149786] env[63024]: DEBUG nova.network.neutron [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Successfully updated port: 209c547a-fef6-4e81-9221-59b72099faa5 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1650.150951] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.191286] env[63024]: DEBUG nova.compute.manager [req-3206e241-038f-4b89-83dc-6a42ef198083 req-e39cc470-55f7-45ab-9410-caa5288e5b37 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Received event network-vif-plugged-209c547a-fef6-4e81-9221-59b72099faa5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1650.191368] env[63024]: DEBUG oslo_concurrency.lockutils [req-3206e241-038f-4b89-83dc-6a42ef198083 req-e39cc470-55f7-45ab-9410-caa5288e5b37 service nova] Acquiring lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1650.191602] env[63024]: DEBUG oslo_concurrency.lockutils [req-3206e241-038f-4b89-83dc-6a42ef198083 req-e39cc470-55f7-45ab-9410-caa5288e5b37 service nova] Lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1650.191691] env[63024]: DEBUG oslo_concurrency.lockutils [req-3206e241-038f-4b89-83dc-6a42ef198083 req-e39cc470-55f7-45ab-9410-caa5288e5b37 service nova] Lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1650.191864] env[63024]: DEBUG nova.compute.manager [req-3206e241-038f-4b89-83dc-6a42ef198083 req-e39cc470-55f7-45ab-9410-caa5288e5b37 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] No waiting events found dispatching network-vif-plugged-209c547a-fef6-4e81-9221-59b72099faa5 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1650.193157] env[63024]: WARNING nova.compute.manager [req-3206e241-038f-4b89-83dc-6a42ef198083 req-e39cc470-55f7-45ab-9410-caa5288e5b37 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Received unexpected event network-vif-plugged-209c547a-fef6-4e81-9221-59b72099faa5 for instance with vm_state building and task_state 
spawning. [ 1650.274305] env[63024]: DEBUG nova.network.neutron [req-41bdb4de-26ec-439d-84f6-d5cf27a65ce3 req-078c4259-52b1-45ec-bda8-23c9444c89c0 service nova] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Updated VIF entry in instance network info cache for port 72b9a842-8d9f-4bc9-945a-1b144bf0e58c. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1650.274709] env[63024]: DEBUG nova.network.neutron [req-41bdb4de-26ec-439d-84f6-d5cf27a65ce3 req-078c4259-52b1-45ec-bda8-23c9444c89c0 service nova] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Updating instance_info_cache with network_info: [{"id": "72b9a842-8d9f-4bc9-945a-1b144bf0e58c", "address": "fa:16:3e:d9:03:11", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.83", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap72b9a842-8d", "ovs_interfaceid": "72b9a842-8d9f-4bc9-945a-1b144bf0e58c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1650.369560] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52395ac5-bc34-d5ee-391c-ba99e6df3b08, 'name': SearchDatastore_Task, 'duration_secs': 0.03038} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.370328] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.370804] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1650.371333] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1650.371724] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.372156] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1650.372674] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d02d3930-e705-48b8-97dd-12ba8ff78c2b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.387145] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1650.387462] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1650.388652] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6c7c675-ef99-4e93-9f2c-2c8c39ba7504 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.396339] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1650.396339] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bd1341-745b-bfef-82de-8c091e73d45c" [ 1650.396339] env[63024]: _type = "Task" [ 1650.396339] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.408968] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bd1341-745b-bfef-82de-8c091e73d45c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.541828] env[63024]: DEBUG nova.compute.utils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1650.543736] env[63024]: DEBUG nova.compute.manager [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1650.547503] env[63024]: DEBUG nova.network.neutron [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1650.606882] env[63024]: DEBUG oslo_vmware.api [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950601, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1650.655520] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1650.655520] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1650.655520] env[63024]: DEBUG nova.network.neutron [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1650.691311] env[63024]: DEBUG nova.policy [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eeec10db12414951b45e800aaccba1fe', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b4b24878df6488e8ca0334376eb6577', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1650.781605] env[63024]: DEBUG oslo_concurrency.lockutils [req-41bdb4de-26ec-439d-84f6-d5cf27a65ce3 req-078c4259-52b1-45ec-bda8-23c9444c89c0 service nova] Releasing lock "refresh_cache-ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1650.781605] env[63024]: DEBUG nova.compute.manager [req-41bdb4de-26ec-439d-84f6-d5cf27a65ce3 req-078c4259-52b1-45ec-bda8-23c9444c89c0 service nova] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Received event network-vif-deleted-f9a7bea2-256c-4de5-9b61-91fa78298f4a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1650.909068] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bd1341-745b-bfef-82de-8c091e73d45c, 'name': SearchDatastore_Task, 'duration_secs': 0.011884} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1650.909306] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-978de3aa-1c8d-4a70-ae3b-843dca343002 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1650.915314] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1650.915314] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ecbe10-94e1-4233-1554-2ed28d2000c2" [ 1650.915314] env[63024]: _type = "Task" [ 1650.915314] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1650.924464] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ecbe10-94e1-4233-1554-2ed28d2000c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.054670] env[63024]: DEBUG nova.compute.manager [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1651.105660] env[63024]: DEBUG oslo_vmware.api [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950601, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.155899] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Acquiring lock "9679a1a2-b003-4a60-a812-8b3a9b5f545f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1651.156305] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Lock "9679a1a2-b003-4a60-a812-8b3a9b5f545f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1651.214671] env[63024]: DEBUG nova.network.neutron [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1651.370544] env[63024]: DEBUG nova.network.neutron [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Successfully created port: d38e9043-9c6b-47a5-b6db-5d1ac7035c12 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1651.424788] env[63024]: DEBUG nova.network.neutron [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Updating instance_info_cache with network_info: [{"id": "209c547a-fef6-4e81-9221-59b72099faa5", "address": "fa:16:3e:f0:17:5b", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap209c547a-fe", "ovs_interfaceid": "209c547a-fef6-4e81-9221-59b72099faa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1651.436491] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ecbe10-94e1-4233-1554-2ed28d2000c2, 'name': SearchDatastore_Task, 'duration_secs': 0.036498} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.436780] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1651.437545] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d/ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1651.437868] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b68596f-e8cc-49bf-9c91-bc80b94a9fb8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.447666] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1651.447666] env[63024]: value = "task-1950606" [ 1651.447666] env[63024]: _type = "Task" [ 1651.447666] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1651.457077] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950606, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.606427] env[63024]: DEBUG oslo_vmware.api [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950601, 'name': PowerOnVM_Task, 'duration_secs': 1.214553} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1651.609266] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1651.609518] env[63024]: DEBUG nova.compute.manager [None req-2462c09f-7180-44df-a659-e2fad5972e31 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1651.611901] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25252495-8fc2-4dad-9884-5714ebdd7eef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.623845] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9608c73-668b-4b65-94b7-749c545be029 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.632333] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0ccdee8-9b5c-402e-9776-6f34598d6d23 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.669080] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab0cfbb-12c9-46d1-9b70-7bd086259b6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.679309] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413ae9bb-bc1e-4296-a63c-4d03a112714b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.696025] env[63024]: DEBUG nova.compute.provider_tree [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1651.930963] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1651.931643] env[63024]: DEBUG nova.compute.manager [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Instance network_info: |[{"id": "209c547a-fef6-4e81-9221-59b72099faa5", "address": "fa:16:3e:f0:17:5b", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": 
[{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap209c547a-fe", "ovs_interfaceid": "209c547a-fef6-4e81-9221-59b72099faa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1651.935238] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f0:17:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '209c547a-fef6-4e81-9221-59b72099faa5', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1651.942074] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Creating folder: Project (6bbfeec6d47746328f185acd132e0d5a). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1651.942447] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50ec8b51-6905-4264-95a2-52cd75f80b3d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.959434] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Created folder: Project (6bbfeec6d47746328f185acd132e0d5a) in parent group-v401959. [ 1651.959726] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Creating folder: Instances. Parent ref: group-v402041. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1651.961397] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2997f222-5bb0-409b-b5d8-7c9b7ade6b4f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1651.969502] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950606, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1651.977874] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Created folder: Instances in parent group-v402041. [ 1651.978154] env[63024]: DEBUG oslo.service.loopingcall [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1651.978386] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1651.978615] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-10f1f1c3-843a-4316-874e-8f317b2ad12c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.007824] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1652.007824] env[63024]: value = "task-1950609" [ 1652.007824] env[63024]: _type = "Task" [ 1652.007824] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.019080] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950609, 'name': CreateVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.079036] env[63024]: DEBUG nova.compute.manager [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1652.118610] env[63024]: DEBUG nova.virt.hardware [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1652.118853] env[63024]: DEBUG nova.virt.hardware [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1652.119012] env[63024]: DEBUG nova.virt.hardware [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1652.119196] env[63024]: DEBUG nova.virt.hardware [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1652.119335] env[63024]: DEBUG nova.virt.hardware [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1652.119476] env[63024]: DEBUG nova.virt.hardware [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1652.119733] env[63024]: DEBUG nova.virt.hardware [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1652.119918] env[63024]: DEBUG nova.virt.hardware [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1652.120131] env[63024]: DEBUG nova.virt.hardware [None 
req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1652.120333] env[63024]: DEBUG nova.virt.hardware [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1652.120534] env[63024]: DEBUG nova.virt.hardware [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1652.121514] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9541f03-7e1c-4bb4-85e3-162036f6b9ff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.135257] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53c8a0a-037a-4db4-a008-61ec519bd90b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.199389] env[63024]: DEBUG nova.scheduler.client.report [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1652.318356] env[63024]: DEBUG nova.compute.manager [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Received event network-changed-209c547a-fef6-4e81-9221-59b72099faa5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1652.318510] env[63024]: DEBUG nova.compute.manager [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Refreshing instance network info cache due to event network-changed-209c547a-fef6-4e81-9221-59b72099faa5. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1652.318729] env[63024]: DEBUG oslo_concurrency.lockutils [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] Acquiring lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1652.318866] env[63024]: DEBUG oslo_concurrency.lockutils [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] Acquired lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1652.319034] env[63024]: DEBUG nova.network.neutron [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Refreshing network info cache for port 209c547a-fef6-4e81-9221-59b72099faa5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1652.461191] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950606, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.680158} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.461594] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d/ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1652.461892] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1652.462195] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-65397a3f-746f-4982-a7da-80d8ddaed7a8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.469558] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1652.469558] env[63024]: value = "task-1950610" [ 1652.469558] env[63024]: _type = "Task" [ 1652.469558] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.478631] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950610, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.517058] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950609, 'name': CreateVM_Task, 'duration_secs': 0.45799} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.518039] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1652.518140] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1652.518286] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1652.518806] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1652.519071] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f8007de-5381-4908-ac4f-1f34ef035086 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.526095] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1652.526095] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524c1ebe-86e2-9885-93a8-bfbb3d110f14" [ 1652.526095] env[63024]: _type = "Task" [ 1652.526095] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.535504] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524c1ebe-86e2-9885-93a8-bfbb3d110f14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.565082] env[63024]: DEBUG oslo_vmware.rw_handles [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e30721-7715-c62f-8d8e-f148e51574f2/disk-0.vmdk. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1652.565082] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0407c7-ab73-4abc-a206-980ab34b667d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.572607] env[63024]: DEBUG oslo_vmware.rw_handles [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e30721-7715-c62f-8d8e-f148e51574f2/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1652.572804] env[63024]: ERROR oslo_vmware.rw_handles [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e30721-7715-c62f-8d8e-f148e51574f2/disk-0.vmdk due to incomplete transfer. [ 1652.573142] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-0ff2308b-a6eb-42da-a306-e1cbfb395167 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.587747] env[63024]: DEBUG oslo_vmware.rw_handles [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52e30721-7715-c62f-8d8e-f148e51574f2/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1652.587884] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Uploaded image 369dafe1-0a7e-4da6-8181-df656ccea797 to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1652.590517] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1652.591134] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-16805b60-d23a-461e-8337-27c518e0b8b7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.598018] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1652.598018] env[63024]: value = "task-1950611" [ 1652.598018] env[63024]: _type = "Task" [ 1652.598018] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1652.611736] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950611, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1652.706152] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.672s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.707251] env[63024]: DEBUG nova.compute.manager [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1652.710293] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 24.785s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.711423] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.711607] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1652.711933] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.964s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.713354] env[63024]: INFO nova.compute.claims [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1652.716959] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdffba55-ba61-4c4b-bc78-2c0d01dbe28d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.725419] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92fc0b8-dcd9-474f-91a7-280d9d1210c3 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.739862] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e54a4dd-c474-456f-8e59-46a4ae250245 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.749467] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96aef40e-f630-437a-837e-7be6df1f8b52 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1652.780899] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179780MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1652.781118] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.983670] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "d49eae54-cccb-4281-aaa0-d6974529eb7b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.983935] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "d49eae54-cccb-4281-aaa0-d6974529eb7b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.984166] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "d49eae54-cccb-4281-aaa0-d6974529eb7b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1652.984441] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "d49eae54-cccb-4281-aaa0-d6974529eb7b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1652.984612] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "d49eae54-cccb-4281-aaa0-d6974529eb7b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1652.986372] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950610, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072637} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1652.986841] env[63024]: INFO nova.compute.manager [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Terminating instance [ 1652.988798] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1652.992194] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d372098-a272-48da-83f2-d90fd34defcf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.021944] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d/ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1653.022915] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-01ce335b-c363-4126-8824-f7e4cb7dd9fd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.051185] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524c1ebe-86e2-9885-93a8-bfbb3d110f14, 'name': SearchDatastore_Task, 'duration_secs': 0.010379} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.052442] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1653.052684] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1653.052914] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.053074] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.053252] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1653.053568] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1653.053568] env[63024]: value = "task-1950613" [ 1653.053568] env[63024]: _type = "Task" [ 1653.053568] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.053747] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2a5f768-6451-44d5-97c8-f4b276a1a160 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.066358] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950613, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.082774] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1653.083008] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1653.083741] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ff41e1b-c099-40bf-acb1-60739f27e4f8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.090683] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1653.090683] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5292b392-552d-262a-5ed4-189c3eed1f41" [ 1653.090683] env[63024]: _type = "Task" [ 1653.090683] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.097180] env[63024]: DEBUG nova.network.neutron [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Updated VIF entry in instance network info cache for port 209c547a-fef6-4e81-9221-59b72099faa5. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1653.097870] env[63024]: DEBUG nova.network.neutron [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Updating instance_info_cache with network_info: [{"id": "209c547a-fef6-4e81-9221-59b72099faa5", "address": "fa:16:3e:f0:17:5b", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap209c547a-fe", "ovs_interfaceid": "209c547a-fef6-4e81-9221-59b72099faa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1653.105611] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5292b392-552d-262a-5ed4-189c3eed1f41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.112673] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950611, 'name': Destroy_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.219209] env[63024]: DEBUG nova.compute.utils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1653.222236] env[63024]: DEBUG nova.compute.manager [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1653.222417] env[63024]: DEBUG nova.network.neutron [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1653.283478] env[63024]: DEBUG nova.policy [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54b7a5c8406e44e3a00cf903bc74e48d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99c4328f2c8c4139b4eace4b465e37e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1653.499217] env[63024]: DEBUG nova.compute.manager [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1653.500160] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1653.501257] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad74392c-cd08-47d2-967d-8aa5f309457b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.509939] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1653.510371] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea41b2b9-9ff9-4052-b629-fcc1bcdddf63 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.517583] env[63024]: DEBUG oslo_vmware.api [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1653.517583] env[63024]: value = "task-1950614" [ 1653.517583] env[63024]: _type = "Task" [ 1653.517583] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.528023] env[63024]: DEBUG oslo_vmware.api [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950614, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.566845] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950613, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.575746] env[63024]: DEBUG nova.network.neutron [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Successfully updated port: d38e9043-9c6b-47a5-b6db-5d1ac7035c12 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1653.608145] env[63024]: DEBUG oslo_concurrency.lockutils [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] Releasing lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1653.608411] env[63024]: DEBUG nova.compute.manager [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Received event network-changed-c1430120-4c82-424a-8155-f1e22eb4a7ae {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1653.608575] env[63024]: DEBUG nova.compute.manager [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Refreshing instance network info cache due to event network-changed-c1430120-4c82-424a-8155-f1e22eb4a7ae. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1653.608777] env[63024]: DEBUG oslo_concurrency.lockutils [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] Acquiring lock "refresh_cache-b7f26f0e-d5a9-42a6-8af2-065659f89cf5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1653.610225] env[63024]: DEBUG oslo_concurrency.lockutils [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] Acquired lock "refresh_cache-b7f26f0e-d5a9-42a6-8af2-065659f89cf5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1653.610225] env[63024]: DEBUG nova.network.neutron [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Refreshing network info cache for port c1430120-4c82-424a-8155-f1e22eb4a7ae {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1653.611223] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5292b392-552d-262a-5ed4-189c3eed1f41, 'name': SearchDatastore_Task, 'duration_secs': 0.03338} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1653.613795] env[63024]: DEBUG nova.network.neutron [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Successfully created port: 9f2961a8-afb7-4cf5-8517-52799f5c0bd1 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1653.616075] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dccc353-ae8b-4502-b3e9-990ebc55ddf5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1653.625864] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950611, 'name': Destroy_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.627672] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1653.627672] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fad90a-1629-fb81-4a9f-4ad5c36f2041" [ 1653.627672] env[63024]: _type = "Task" [ 1653.627672] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1653.636925] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fad90a-1629-fb81-4a9f-4ad5c36f2041, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1653.723577] env[63024]: DEBUG nova.compute.manager [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1653.731972] env[63024]: DEBUG nova.compute.manager [req-dcac39ae-88bf-401a-8bd3-a2814b485d32 req-acda7bc8-345c-4fc7-aecc-e32990658580 service nova] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Received event network-vif-plugged-d38e9043-9c6b-47a5-b6db-5d1ac7035c12 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1653.735516] env[63024]: DEBUG oslo_concurrency.lockutils [req-dcac39ae-88bf-401a-8bd3-a2814b485d32 req-acda7bc8-345c-4fc7-aecc-e32990658580 service nova] Acquiring lock "00e925a1-9b79-46e2-b7f7-c0b63e1e72df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.735516] env[63024]: DEBUG oslo_concurrency.lockutils [req-dcac39ae-88bf-401a-8bd3-a2814b485d32 req-acda7bc8-345c-4fc7-aecc-e32990658580 service nova] Lock "00e925a1-9b79-46e2-b7f7-c0b63e1e72df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1653.735516] env[63024]: DEBUG oslo_concurrency.lockutils [req-dcac39ae-88bf-401a-8bd3-a2814b485d32 req-acda7bc8-345c-4fc7-aecc-e32990658580 service nova] Lock "00e925a1-9b79-46e2-b7f7-c0b63e1e72df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1653.735516] env[63024]: DEBUG nova.compute.manager [req-dcac39ae-88bf-401a-8bd3-a2814b485d32 req-acda7bc8-345c-4fc7-aecc-e32990658580 service nova] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] No waiting events found dispatching network-vif-plugged-d38e9043-9c6b-47a5-b6db-5d1ac7035c12 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1653.735516] env[63024]: WARNING nova.compute.manager [req-dcac39ae-88bf-401a-8bd3-a2814b485d32 req-acda7bc8-345c-4fc7-aecc-e32990658580 service nova] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Received unexpected event network-vif-plugged-d38e9043-9c6b-47a5-b6db-5d1ac7035c12 for instance with vm_state building and task_state spawning. 
[ 1653.832564] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1653.832794] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1654.031300] env[63024]: DEBUG oslo_vmware.api [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950614, 'name': PowerOffVM_Task, 'duration_secs': 0.240995} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.032258] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1654.032258] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1654.032258] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0cf02906-3e23-4142-a152-f2f779b81c3b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.073207] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950613, 'name': ReconfigVM_Task, 'duration_secs': 0.576286} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.074186] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Reconfigured VM instance instance-0000001a to attach disk [datastore1] ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d/ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1654.074186] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6e2a2c47-f04f-4989-8958-9607ed80ea51 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.078629] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Acquiring lock "refresh_cache-00e925a1-9b79-46e2-b7f7-c0b63e1e72df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1654.078766] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Acquired lock "refresh_cache-00e925a1-9b79-46e2-b7f7-c0b63e1e72df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1654.078909] env[63024]: DEBUG nova.network.neutron [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1654.082933] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1654.082933] env[63024]: value = "task-1950616" [ 1654.082933] env[63024]: _type = "Task" [ 1654.082933] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.089479] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950616, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.114617] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950611, 'name': Destroy_Task, 'duration_secs': 1.026642} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.114808] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Destroyed the VM [ 1654.115098] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1654.115662] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ab15a4ff-a80d-4a44-ab6f-c44dcc68476c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.125419] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1654.125419] env[63024]: value = "task-1950617" [ 1654.125419] env[63024]: _type = "Task" [ 1654.125419] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.139808] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950617, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.146258] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fad90a-1629-fb81-4a9f-4ad5c36f2041, 'name': SearchDatastore_Task, 'duration_secs': 0.038149} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.148842] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1654.148842] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e8ad74ce-7862-4574-98e7-14bc54bd5d6c/e8ad74ce-7862-4574-98e7-14bc54bd5d6c.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1654.149123] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-beff9355-0104-49a7-a1ee-2f9fa3c68476 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.157937] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1654.157937] env[63024]: value = "task-1950618" [ 1654.157937] env[63024]: _type = "Task" [ 1654.157937] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.166577] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950618, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.168824] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45b0da22-b809-4421-a5f5-355105aaebeb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.175739] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c956fe5-7841-4a64-9180-fb7214f8f13e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.210178] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe9aadc-d7d2-40fc-b3c5-6a6650faf89d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.218287] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a48b75c4-69e5-45cd-96c2-17a1c611825a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.235726] env[63024]: DEBUG nova.compute.provider_tree [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1654.312821] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1654.313967] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1654.313967] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Deleting the datastore file [datastore1] d49eae54-cccb-4281-aaa0-d6974529eb7b {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1654.313967] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7a040659-bb0f-478e-ae28-b9f6153e864c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.319872] env[63024]: DEBUG oslo_vmware.api [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1654.319872] 
env[63024]: value = "task-1950619" [ 1654.319872] env[63024]: _type = "Task" [ 1654.319872] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.327335] env[63024]: DEBUG oslo_vmware.api [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950619, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.451491] env[63024]: DEBUG nova.network.neutron [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Updated VIF entry in instance network info cache for port c1430120-4c82-424a-8155-f1e22eb4a7ae. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1654.451851] env[63024]: DEBUG nova.network.neutron [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Updating instance_info_cache with network_info: [{"id": "c1430120-4c82-424a-8155-f1e22eb4a7ae", "address": "fa:16:3e:57:65:47", "network": {"id": "19563e44-3f43-4aee-8e65-abfb07528a6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-310580499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.248", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5d51c3beec44aecb65ba72dffb42d40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc1430120-4c", "ovs_interfaceid": "c1430120-4c82-424a-8155-f1e22eb4a7ae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.593097] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950616, 'name': Rename_Task, 'duration_secs': 0.144979} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1654.593387] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1654.593624] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-44c0c666-4284-4b29-9c5e-ac0f26ea8516 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.599839] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1654.599839] env[63024]: value = "task-1950620" [ 1654.599839] env[63024]: _type = "Task" [ 1654.599839] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1654.607798] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950620, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.631787] env[63024]: DEBUG nova.network.neutron [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1654.640665] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950617, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.670637] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950618, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.740504] env[63024]: DEBUG nova.compute.manager [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1654.781157] env[63024]: ERROR nova.scheduler.client.report [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [req-ab6b9fd6-2001-469b-b20e-13567889c6e2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ab6b9fd6-2001-469b-b20e-13567889c6e2"}]} [ 1654.790368] env[63024]: DEBUG nova.virt.hardware [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1654.790616] env[63024]: DEBUG nova.virt.hardware [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1654.790764] env[63024]: DEBUG nova.virt.hardware [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1654.790932] env[63024]: DEBUG nova.virt.hardware [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1654.791182] env[63024]: DEBUG nova.virt.hardware [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1654.791356] env[63024]: DEBUG nova.virt.hardware [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1654.791564] env[63024]: DEBUG nova.virt.hardware [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1654.791725] env[63024]: DEBUG nova.virt.hardware [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1654.791887] env[63024]: DEBUG nova.virt.hardware [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1654.792082] env[63024]: DEBUG nova.virt.hardware [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1654.792275] env[63024]: DEBUG nova.virt.hardware [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1654.793408] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf21fbd0-42a3-467d-92d0-dbd6fad55653 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.800184] env[63024]: DEBUG nova.scheduler.client.report [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1654.805294] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b2d329e-3677-4ca7-96de-0749040179b1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1654.813749] env[63024]: DEBUG nova.scheduler.client.report [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1654.813979] env[63024]: DEBUG nova.compute.provider_tree 
[None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1654.837589] env[63024]: DEBUG oslo_vmware.api [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950619, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1654.838788] env[63024]: DEBUG nova.scheduler.client.report [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1654.862916] env[63024]: DEBUG nova.scheduler.client.report [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1654.903708] env[63024]: DEBUG nova.network.neutron [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Updating instance_info_cache with network_info: [{"id": "d38e9043-9c6b-47a5-b6db-5d1ac7035c12", "address": "fa:16:3e:74:d0:96", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd38e9043-9c", "ovs_interfaceid": "d38e9043-9c6b-47a5-b6db-5d1ac7035c12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1654.954795] env[63024]: 
DEBUG oslo_concurrency.lockutils [req-94be729d-f24d-4f2f-9764-ae2010616a7c req-b67039fe-0e9b-4656-8788-ef95e8bc96a5 service nova] Releasing lock "refresh_cache-b7f26f0e-d5a9-42a6-8af2-065659f89cf5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1655.117397] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950620, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.121354] env[63024]: DEBUG nova.compute.manager [req-3a3a76f7-4d42-4071-9d09-770c81df6fb0 req-9909c6bd-6874-4916-b73d-60a3f0df31a1 service nova] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Received event network-vif-plugged-9f2961a8-afb7-4cf5-8517-52799f5c0bd1 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1655.121608] env[63024]: DEBUG oslo_concurrency.lockutils [req-3a3a76f7-4d42-4071-9d09-770c81df6fb0 req-9909c6bd-6874-4916-b73d-60a3f0df31a1 service nova] Acquiring lock "1ad97ed0-2a84-4783-8511-e0f6b24861bd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1655.121684] env[63024]: DEBUG oslo_concurrency.lockutils [req-3a3a76f7-4d42-4071-9d09-770c81df6fb0 req-9909c6bd-6874-4916-b73d-60a3f0df31a1 service nova] Lock "1ad97ed0-2a84-4783-8511-e0f6b24861bd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1655.122859] env[63024]: DEBUG oslo_concurrency.lockutils [req-3a3a76f7-4d42-4071-9d09-770c81df6fb0 req-9909c6bd-6874-4916-b73d-60a3f0df31a1 service nova] Lock "1ad97ed0-2a84-4783-8511-e0f6b24861bd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1655.122859] env[63024]: DEBUG nova.compute.manager [req-3a3a76f7-4d42-4071-9d09-770c81df6fb0 req-9909c6bd-6874-4916-b73d-60a3f0df31a1 service nova] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] No waiting events found dispatching network-vif-plugged-9f2961a8-afb7-4cf5-8517-52799f5c0bd1 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1655.123237] env[63024]: WARNING nova.compute.manager [req-3a3a76f7-4d42-4071-9d09-770c81df6fb0 req-9909c6bd-6874-4916-b73d-60a3f0df31a1 service nova] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Received unexpected event network-vif-plugged-9f2961a8-afb7-4cf5-8517-52799f5c0bd1 for instance with vm_state building and task_state spawning. [ 1655.138694] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950617, 'name': RemoveSnapshot_Task} progress is 50%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.174557] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950618, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.768441} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.175039] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e8ad74ce-7862-4574-98e7-14bc54bd5d6c/e8ad74ce-7862-4574-98e7-14bc54bd5d6c.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1655.175266] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1655.176037] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ba46849-0a37-442f-8e30-a6862d88b872 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.183580] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1655.183580] env[63024]: value = "task-1950622" [ 1655.183580] env[63024]: _type = "Task" [ 1655.183580] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.198713] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950622, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.206048] env[63024]: DEBUG nova.network.neutron [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Successfully updated port: 9f2961a8-afb7-4cf5-8517-52799f5c0bd1 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1655.339954] env[63024]: DEBUG oslo_vmware.api [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950619, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.654754} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.339954] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1655.340096] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1655.340697] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1655.340697] env[63024]: INFO nova.compute.manager [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Took 1.84 seconds to destroy the instance on the hypervisor. [ 1655.341368] env[63024]: DEBUG oslo.service.loopingcall [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1655.341368] env[63024]: DEBUG nova.compute.manager [-] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1655.341368] env[63024]: DEBUG nova.network.neutron [-] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1655.345617] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b923b7-8db6-4665-935e-a9f0fb83fb41 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.354404] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-712cc276-0058-4079-b31c-41a6e7407761 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.390095] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d3f6de-6e57-41b9-9b2d-1b816447ac18 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.404387] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6462ebb7-c956-4a2f-a631-b972aede0ca5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.408888] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 
tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Releasing lock "refresh_cache-00e925a1-9b79-46e2-b7f7-c0b63e1e72df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1655.409209] env[63024]: DEBUG nova.compute.manager [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Instance network_info: |[{"id": "d38e9043-9c6b-47a5-b6db-5d1ac7035c12", "address": "fa:16:3e:74:d0:96", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd38e9043-9c", "ovs_interfaceid": "d38e9043-9c6b-47a5-b6db-5d1ac7035c12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1655.409600] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:d0:96', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd38e9043-9c6b-47a5-b6db-5d1ac7035c12', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1655.416877] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Creating folder: Project (3b4b24878df6488e8ca0334376eb6577). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1655.419938] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-68ef5216-ddca-4d3f-a313-6621937d99dd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.429976] env[63024]: DEBUG nova.compute.provider_tree [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1655.441340] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Created folder: Project (3b4b24878df6488e8ca0334376eb6577) in parent group-v401959. [ 1655.441570] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Creating folder: Instances. Parent ref: group-v402044. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1655.442121] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a2844a9-8bef-4b3d-be9a-7cc7303f8303 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.450902] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Created folder: Instances in parent group-v402044. [ 1655.451147] env[63024]: DEBUG oslo.service.loopingcall [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1655.451330] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1655.451535] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9e3a502-de66-4f7f-a0e0-0ee370c61258 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.474954] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1655.474954] env[63024]: value = "task-1950625" [ 1655.474954] env[63024]: _type = "Task" [ 1655.474954] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.479747] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950625, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.613079] env[63024]: DEBUG oslo_vmware.api [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950620, 'name': PowerOnVM_Task, 'duration_secs': 0.862065} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.613480] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1655.613634] env[63024]: INFO nova.compute.manager [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Took 9.98 seconds to spawn the instance on the hypervisor. [ 1655.613824] env[63024]: DEBUG nova.compute.manager [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1655.614609] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6133eccb-b9f7-4d79-b8f9-e91d2b18b4ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.639708] env[63024]: DEBUG oslo_vmware.api [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950617, 'name': RemoveSnapshot_Task, 'duration_secs': 1.278732} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.639708] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1655.639708] env[63024]: INFO nova.compute.manager [None req-734108b8-059a-4224-9eea-52ce6e096ae6 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Took 19.81 seconds to snapshot the instance on the hypervisor. [ 1655.696277] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950622, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069564} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1655.696596] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1655.698023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d36294-cb0f-44fa-83d2-729e4a9bc74c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.712924] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "refresh_cache-1ad97ed0-2a84-4783-8511-e0f6b24861bd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1655.713116] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "refresh_cache-1ad97ed0-2a84-4783-8511-e0f6b24861bd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1655.713300] env[63024]: DEBUG nova.network.neutron [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1655.723337] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Reconfiguring VM instance instance-0000001b to attach disk [datastore1] e8ad74ce-7862-4574-98e7-14bc54bd5d6c/e8ad74ce-7862-4574-98e7-14bc54bd5d6c.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1655.726256] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ae28fa3-7128-4f61-806a-704efdbc64b7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1655.747324] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1655.747324] env[63024]: value = "task-1950626" [ 1655.747324] env[63024]: _type = "Task" [ 1655.747324] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1655.755707] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950626, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1655.761506] env[63024]: DEBUG nova.compute.manager [req-0d1011e2-c78e-4137-b244-97da2150cdb4 req-cc32db12-1c60-4959-b2ab-684ec1f3408a service nova] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Received event network-changed-d38e9043-9c6b-47a5-b6db-5d1ac7035c12 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1655.761735] env[63024]: DEBUG nova.compute.manager [req-0d1011e2-c78e-4137-b244-97da2150cdb4 req-cc32db12-1c60-4959-b2ab-684ec1f3408a service nova] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Refreshing instance network info cache due to event network-changed-d38e9043-9c6b-47a5-b6db-5d1ac7035c12. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1655.761980] env[63024]: DEBUG oslo_concurrency.lockutils [req-0d1011e2-c78e-4137-b244-97da2150cdb4 req-cc32db12-1c60-4959-b2ab-684ec1f3408a service nova] Acquiring lock "refresh_cache-00e925a1-9b79-46e2-b7f7-c0b63e1e72df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1655.762150] env[63024]: DEBUG oslo_concurrency.lockutils [req-0d1011e2-c78e-4137-b244-97da2150cdb4 req-cc32db12-1c60-4959-b2ab-684ec1f3408a service nova] Acquired lock "refresh_cache-00e925a1-9b79-46e2-b7f7-c0b63e1e72df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1655.762314] env[63024]: DEBUG nova.network.neutron [req-0d1011e2-c78e-4137-b244-97da2150cdb4 req-cc32db12-1c60-4959-b2ab-684ec1f3408a service nova] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Refreshing network info cache for port d38e9043-9c6b-47a5-b6db-5d1ac7035c12 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1655.784829] env[63024]: DEBUG nova.network.neutron [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1655.964187] env[63024]: DEBUG nova.scheduler.client.report [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 51 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1655.964466] env[63024]: DEBUG nova.compute.provider_tree [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 51 to 52 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1655.964644] env[63024]: DEBUG nova.compute.provider_tree [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1655.981937] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950625, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.132690] env[63024]: INFO nova.compute.manager [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Took 41.55 seconds to build instance. 
[ 1656.233027] env[63024]: DEBUG nova.network.neutron [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Updating instance_info_cache with network_info: [{"id": "9f2961a8-afb7-4cf5-8517-52799f5c0bd1", "address": "fa:16:3e:23:0f:27", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2961a8-af", "ovs_interfaceid": "9f2961a8-afb7-4cf5-8517-52799f5c0bd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.259194] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950626, 'name': ReconfigVM_Task, 'duration_secs': 0.278255} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.259573] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Reconfigured VM instance instance-0000001b to attach disk [datastore1] e8ad74ce-7862-4574-98e7-14bc54bd5d6c/e8ad74ce-7862-4574-98e7-14bc54bd5d6c.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1656.260304] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e421bfe1-7fcb-4bd5-8a81-5ec625a23993 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.267908] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1656.267908] env[63024]: value = "task-1950627" [ 1656.267908] env[63024]: _type = "Task" [ 1656.267908] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.277021] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950627, 'name': Rename_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.338227] env[63024]: DEBUG nova.network.neutron [-] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.470106] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.758s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.470635] env[63024]: DEBUG nova.compute.manager [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1656.473205] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.588s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1656.474705] env[63024]: INFO nova.compute.claims [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1656.498359] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950625, 'name': CreateVM_Task, 'duration_secs': 0.882899} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.500242] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1656.500242] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1656.500242] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1656.500551] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1656.500805] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98ab2384-1046-48d4-8516-697315ed718c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.510941] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Waiting for the task: (returnval){ [ 1656.510941] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a9be44-eaac-76a8-134f-4dbd22332482" [ 1656.510941] env[63024]: _type = "Task" [ 1656.510941] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.518199] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a9be44-eaac-76a8-134f-4dbd22332482, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.592683] env[63024]: DEBUG nova.network.neutron [req-0d1011e2-c78e-4137-b244-97da2150cdb4 req-cc32db12-1c60-4959-b2ab-684ec1f3408a service nova] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Updated VIF entry in instance network info cache for port d38e9043-9c6b-47a5-b6db-5d1ac7035c12. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1656.593784] env[63024]: DEBUG nova.network.neutron [req-0d1011e2-c78e-4137-b244-97da2150cdb4 req-cc32db12-1c60-4959-b2ab-684ec1f3408a service nova] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Updating instance_info_cache with network_info: [{"id": "d38e9043-9c6b-47a5-b6db-5d1ac7035c12", "address": "fa:16:3e:74:d0:96", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd38e9043-9c", "ovs_interfaceid": "d38e9043-9c6b-47a5-b6db-5d1ac7035c12", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1656.635593] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0408d54b-92fc-4deb-92af-79e4848d2668 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.884s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1656.735588] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "refresh_cache-1ad97ed0-2a84-4783-8511-e0f6b24861bd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1656.735975] env[63024]: DEBUG nova.compute.manager [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Instance network_info: |[{"id": "9f2961a8-afb7-4cf5-8517-52799f5c0bd1", "address": "fa:16:3e:23:0f:27", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", 
"segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2961a8-af", "ovs_interfaceid": "9f2961a8-afb7-4cf5-8517-52799f5c0bd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1656.736445] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:0f:27', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f2961a8-afb7-4cf5-8517-52799f5c0bd1', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1656.746507] env[63024]: DEBUG oslo.service.loopingcall [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1656.746730] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1656.746947] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2e6ce1f7-c6e5-40d6-9497-7b8e0fce4800 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.767111] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1656.767111] env[63024]: value = "task-1950628" [ 1656.767111] env[63024]: _type = "Task" [ 1656.767111] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.786087] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950628, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.786555] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950627, 'name': Rename_Task, 'duration_secs': 0.138165} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1656.786820] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1656.787143] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ae20ec7-8c60-48df-81c7-0182239681cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1656.793688] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1656.793688] env[63024]: value = "task-1950629" [ 1656.793688] env[63024]: _type = "Task" [ 1656.793688] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1656.801704] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950629, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1656.841492] env[63024]: INFO nova.compute.manager [-] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Took 1.50 seconds to deallocate network for instance. [ 1656.983707] env[63024]: DEBUG nova.compute.utils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1656.985339] env[63024]: DEBUG nova.compute.manager [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1656.985568] env[63024]: DEBUG nova.network.neutron [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1657.019681] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a9be44-eaac-76a8-134f-4dbd22332482, 'name': SearchDatastore_Task, 'duration_secs': 0.009841} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.020227] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.020590] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1657.021041] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.021337] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.021713] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1657.022198] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d836635f-7fb4-4283-bbdc-cb430fcdcace {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.033023] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1657.033023] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1657.033023] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b33573a0-3dc5-45c1-9c63-c47adda6825f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.037245] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Waiting for the task: (returnval){ [ 1657.037245] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52275434-0b3c-e178-d69d-91340745d2c3" [ 1657.037245] env[63024]: _type = "Task" [ 1657.037245] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.046662] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52275434-0b3c-e178-d69d-91340745d2c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.051412] env[63024]: DEBUG nova.policy [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fc112b4851e4dbeac3a69409e7bf98e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1886be852b01400aaf7a31c8fe5d4d7a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1657.098240] env[63024]: DEBUG oslo_concurrency.lockutils [req-0d1011e2-c78e-4137-b244-97da2150cdb4 req-cc32db12-1c60-4959-b2ab-684ec1f3408a service nova] Releasing lock "refresh_cache-00e925a1-9b79-46e2-b7f7-c0b63e1e72df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.107643] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49cfc70c-36d6-42c9-9455-41590d37d94c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.114139] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-41d7e42d-f8b6-45ce-8217-48f00e73e146 tempest-ServersAdminNegativeTestJSON-1822127807 tempest-ServersAdminNegativeTestJSON-1822127807-project-admin] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Suspending the VM {{(pid=63024) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1657.114642] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-25bd1db4-2125-481c-a80f-062a013de3df {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.120104] env[63024]: DEBUG oslo_vmware.api [None req-41d7e42d-f8b6-45ce-8217-48f00e73e146 tempest-ServersAdminNegativeTestJSON-1822127807 tempest-ServersAdminNegativeTestJSON-1822127807-project-admin] Waiting for the task: (returnval){ [ 
1657.120104] env[63024]: value = "task-1950631" [ 1657.120104] env[63024]: _type = "Task" [ 1657.120104] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.128852] env[63024]: DEBUG oslo_vmware.api [None req-41d7e42d-f8b6-45ce-8217-48f00e73e146 tempest-ServersAdminNegativeTestJSON-1822127807 tempest-ServersAdminNegativeTestJSON-1822127807-project-admin] Task: {'id': task-1950631, 'name': SuspendVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.137961] env[63024]: DEBUG nova.compute.manager [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1657.277063] env[63024]: DEBUG nova.compute.manager [req-777b7e5f-6692-4e5e-b2c3-d5eb610b1363 req-33c86087-4629-4834-bb6c-e8c380d41568 service nova] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Received event network-changed-9f2961a8-afb7-4cf5-8517-52799f5c0bd1 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1657.277063] env[63024]: DEBUG nova.compute.manager [req-777b7e5f-6692-4e5e-b2c3-d5eb610b1363 req-33c86087-4629-4834-bb6c-e8c380d41568 service nova] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Refreshing instance network info cache due to event network-changed-9f2961a8-afb7-4cf5-8517-52799f5c0bd1. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1657.282026] env[63024]: DEBUG oslo_concurrency.lockutils [req-777b7e5f-6692-4e5e-b2c3-d5eb610b1363 req-33c86087-4629-4834-bb6c-e8c380d41568 service nova] Acquiring lock "refresh_cache-1ad97ed0-2a84-4783-8511-e0f6b24861bd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.282026] env[63024]: DEBUG oslo_concurrency.lockutils [req-777b7e5f-6692-4e5e-b2c3-d5eb610b1363 req-33c86087-4629-4834-bb6c-e8c380d41568 service nova] Acquired lock "refresh_cache-1ad97ed0-2a84-4783-8511-e0f6b24861bd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.282026] env[63024]: DEBUG nova.network.neutron [req-777b7e5f-6692-4e5e-b2c3-d5eb610b1363 req-33c86087-4629-4834-bb6c-e8c380d41568 service nova] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Refreshing network info cache for port 9f2961a8-afb7-4cf5-8517-52799f5c0bd1 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1657.287146] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950628, 'name': CreateVM_Task, 'duration_secs': 0.451022} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.287763] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1657.288616] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.288952] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1657.289423] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1657.290624] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a437ced-ba67-4a5f-a8a1-a09325e88207 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.303374] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1657.303374] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ac2d29-283e-8418-c2d7-6f2269bf06c0" [ 1657.303374] env[63024]: _type = "Task" [ 1657.303374] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.311984] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950629, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.318319] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ac2d29-283e-8418-c2d7-6f2269bf06c0, 'name': SearchDatastore_Task, 'duration_secs': 0.019018} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.318683] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1657.318970] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1657.319247] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1657.348009] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.489149] env[63024]: DEBUG nova.compute.manager [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1657.552297] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52275434-0b3c-e178-d69d-91340745d2c3, 'name': SearchDatastore_Task, 'duration_secs': 0.011451} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.554265] env[63024]: DEBUG nova.network.neutron [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Successfully created port: 9a7cae93-d7ad-4e7c-b18b-00e7e749299e {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1657.561813] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b5b9c73-4f54-4b3c-9795-326907b7fad5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.574021] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Waiting for the task: (returnval){ [ 1657.574021] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52872b17-dd36-37d1-256b-63d03ff070dd" [ 1657.574021] env[63024]: _type = "Task" [ 1657.574021] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1657.581608] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52872b17-dd36-37d1-256b-63d03ff070dd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.634320] env[63024]: DEBUG oslo_vmware.api [None req-41d7e42d-f8b6-45ce-8217-48f00e73e146 tempest-ServersAdminNegativeTestJSON-1822127807 tempest-ServersAdminNegativeTestJSON-1822127807-project-admin] Task: {'id': task-1950631, 'name': SuspendVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1657.662375] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1657.813242] env[63024]: DEBUG oslo_vmware.api [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950629, 'name': PowerOnVM_Task, 'duration_secs': 0.774642} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1657.817514] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1657.817746] env[63024]: INFO nova.compute.manager [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Took 9.53 seconds to spawn the instance on the hypervisor. [ 1657.817982] env[63024]: DEBUG nova.compute.manager [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1657.821349] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fde6e3fc-408d-49cb-a489-ecadc0732a6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1657.991544] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79997a6a-9eba-4932-8be2-1ef4ccf4575c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.002494] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e56bbb1-0ad8-419c-bcd4-dea2f7de15ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.037312] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5da4837d-e4d9-4758-8ce5-399f23f528b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.040878] env[63024]: DEBUG nova.network.neutron [req-777b7e5f-6692-4e5e-b2c3-d5eb610b1363 req-33c86087-4629-4834-bb6c-e8c380d41568 service nova] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Updated VIF entry in instance network info cache for port 9f2961a8-afb7-4cf5-8517-52799f5c0bd1. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1658.041243] env[63024]: DEBUG nova.network.neutron [req-777b7e5f-6692-4e5e-b2c3-d5eb610b1363 req-33c86087-4629-4834-bb6c-e8c380d41568 service nova] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Updating instance_info_cache with network_info: [{"id": "9f2961a8-afb7-4cf5-8517-52799f5c0bd1", "address": "fa:16:3e:23:0f:27", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f2961a8-af", "ovs_interfaceid": "9f2961a8-afb7-4cf5-8517-52799f5c0bd1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1658.045693] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04862d5e-f94c-4d87-9333-2bc30eabf676 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.059426] env[63024]: DEBUG nova.compute.provider_tree [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1658.067911] env[63024]: DEBUG nova.compute.manager [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1658.068853] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51857b28-f20d-479b-b1d0-43f79e4695bc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.083816] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52872b17-dd36-37d1-256b-63d03ff070dd, 'name': SearchDatastore_Task, 'duration_secs': 0.01477} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.084067] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1658.084312] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 00e925a1-9b79-46e2-b7f7-c0b63e1e72df/00e925a1-9b79-46e2-b7f7-c0b63e1e72df.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1658.084569] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1658.084748] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1658.084938] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-162aa765-cdc0-493a-9803-e7394b55aac4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.086636] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f11a9ac3-73b8-4381-ba7a-b4bcef380fb3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.093164] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Waiting for the task: (returnval){ [ 1658.093164] env[63024]: value = "task-1950632" [ 1658.093164] env[63024]: _type = "Task" [ 1658.093164] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.096687] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1658.096861] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1658.097870] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b36c1eb-b1fb-4525-984b-fa995bffbaf4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.102701] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950632, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.105905] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1658.105905] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52159126-befc-1a8d-b524-d0483fbd0f99" [ 1658.105905] env[63024]: _type = "Task" [ 1658.105905] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.113302] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52159126-befc-1a8d-b524-d0483fbd0f99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.128829] env[63024]: DEBUG oslo_vmware.api [None req-41d7e42d-f8b6-45ce-8217-48f00e73e146 tempest-ServersAdminNegativeTestJSON-1822127807 tempest-ServersAdminNegativeTestJSON-1822127807-project-admin] Task: {'id': task-1950631, 'name': SuspendVM_Task, 'duration_secs': 0.73647} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.129093] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-41d7e42d-f8b6-45ce-8217-48f00e73e146 tempest-ServersAdminNegativeTestJSON-1822127807 tempest-ServersAdminNegativeTestJSON-1822127807-project-admin] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Suspended the VM {{(pid=63024) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1658.129269] env[63024]: DEBUG nova.compute.manager [None req-41d7e42d-f8b6-45ce-8217-48f00e73e146 tempest-ServersAdminNegativeTestJSON-1822127807 tempest-ServersAdminNegativeTestJSON-1822127807-project-admin] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1658.130022] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dacb424-edbd-4b45-853f-6533398095de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.345126] env[63024]: INFO nova.compute.manager [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Took 41.21 seconds to build instance. 
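[editor's note] The entries around this point trace the VMware driver's image-cache spawn path for instance 1ad97ed0-2a84-4783-8511-e0f6b24861bd: take the lock on the cached VMDK under devstack-image-cache_base, run SearchDatastore_Task to confirm it exists, MakeDirectory and CopyVirtualDisk_Task to clone it into the instance directory, ExtendVirtualDisk_Task to grow the root disk to 1048576 KB (the m1.nano flavor's 1 GiB root_gb), then ReconfigVM_Task / Rename_Task / PowerOnVM_Task, with each vSphere task polled until it completes. The short Python sketch below only illustrates that poll-and-copy sequence under stated assumptions; wait_for_task here is a stand-in for the oslo_vmware polling seen in the log paths, and fake_task / spawn_from_cached_image are hypothetical helpers, not the actual nova.virt.vmwareapi API.

    import time

    def wait_for_task(poll, interval=0.0):
        """Poll a task-like callable until it reports success or error.
        (Same idea as the wait_for_task/_poll_task loop in the log; a
        stand-in, not the real oslo_vmware implementation.)"""
        while True:
            state = poll()
            if state == "success":
                return
            if state == "error":
                raise RuntimeError("vSphere task failed")
            time.sleep(interval)

    def fake_task(polls_until_done=3):
        """Hypothetical task handle: reports 'running' a few times, then 'success'."""
        remaining = {"n": polls_until_done}
        def poll():
            remaining["n"] -= 1
            return "success" if remaining["n"] <= 0 else "running"
        return poll

    def spawn_from_cached_image(cache_vmdk, instance_vmdk, root_gb):
        """Sketch of the cache-then-copy spawn path traced by the log entries."""
        # SearchDatastore_Task: confirm the cached image VMDK is already present
        wait_for_task(fake_task())
        # CopyVirtualDisk_Task: copy the cached VMDK into the instance directory
        wait_for_task(fake_task())
        # ExtendVirtualDisk_Task: grow the root disk to the flavor's root_gb
        wait_for_task(fake_task())
        return "%s ready at %d GiB (copied from %s)" % (instance_vmdk, root_gb, cache_vmdk)

    if __name__ == "__main__":
        print(spawn_from_cached_image(
            "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk",
            "[datastore1] 1ad97ed0-2a84-4783-8511-e0f6b24861bd/1ad97ed0-2a84-4783-8511-e0f6b24861bd.vmdk",
            1))

The same pattern repeats below for instance 00e925a1-9b79-46e2-b7f7-c0b63e1e72df (tasks 1950632/1950633) and for the linked-clone snapshot path (CreateSnapshot_Task then CloneVM_Task); only the task names differ, the polling loop is identical.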
[ 1658.499829] env[63024]: DEBUG nova.compute.manager [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1658.522475] env[63024]: DEBUG nova.virt.hardware [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1658.522710] env[63024]: DEBUG nova.virt.hardware [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1658.522863] env[63024]: DEBUG nova.virt.hardware [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1658.523054] env[63024]: DEBUG nova.virt.hardware [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1658.523201] env[63024]: DEBUG nova.virt.hardware [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1658.523344] env[63024]: DEBUG nova.virt.hardware [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1658.523549] env[63024]: DEBUG nova.virt.hardware [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1658.523702] env[63024]: DEBUG nova.virt.hardware [None 
req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1658.524250] env[63024]: DEBUG nova.virt.hardware [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1658.524250] env[63024]: DEBUG nova.virt.hardware [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1658.524250] env[63024]: DEBUG nova.virt.hardware [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1658.525038] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a19f50-b01c-4d39-9add-6809b13143f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.533171] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f872f8-262c-430b-99b0-9fb28014b4e1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.548029] env[63024]: DEBUG oslo_concurrency.lockutils [req-777b7e5f-6692-4e5e-b2c3-d5eb610b1363 req-33c86087-4629-4834-bb6c-e8c380d41568 service nova] Releasing lock "refresh_cache-1ad97ed0-2a84-4783-8511-e0f6b24861bd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1658.548318] env[63024]: DEBUG nova.compute.manager [req-777b7e5f-6692-4e5e-b2c3-d5eb610b1363 req-33c86087-4629-4834-bb6c-e8c380d41568 service nova] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Received event network-vif-deleted-776bbe97-34ad-47f3-9045-81bb3c16a126 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1658.562850] env[63024]: DEBUG nova.scheduler.client.report [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1658.580722] env[63024]: INFO nova.compute.manager [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] instance snapshotting [ 1658.586309] 
env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cc37f3-c85f-441d-bb84-9cffa2597ca9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.608280] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2784bbc7-5735-4dcd-bd5c-6e6845954715 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.618631] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950632, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.624336] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52159126-befc-1a8d-b524-d0483fbd0f99, 'name': SearchDatastore_Task, 'duration_secs': 0.041785} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1658.626709] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10a87394-5f82-427c-af72-47b53a6a6559 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1658.631851] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1658.631851] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52974097-91ae-afa9-867e-2454df1a0e36" [ 1658.631851] env[63024]: _type = "Task" [ 1658.631851] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1658.644197] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52974097-91ae-afa9-867e-2454df1a0e36, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1658.847170] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b20fc6a5-6413-4332-ad37-ab997d8247c0 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.407s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.068017] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.071262] env[63024]: DEBUG nova.compute.manager [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1659.072256] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.882s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.072498] env[63024]: DEBUG nova.objects.instance [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lazy-loading 'resources' on Instance uuid e2138192-14e0-43d2-9d19-9820747d7217 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1659.115620] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950632, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.95022} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.115865] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 00e925a1-9b79-46e2-b7f7-c0b63e1e72df/00e925a1-9b79-46e2-b7f7-c0b63e1e72df.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1659.116079] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1659.116317] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-596c36f3-d974-436c-b443-920d36e2cbb0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.123215] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Waiting for the task: (returnval){ [ 1659.123215] env[63024]: value = "task-1950633" [ 1659.123215] env[63024]: _type = "Task" [ 1659.123215] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.128964] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1659.131132] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-a75625a8-e33f-4f53-afb5-ea601a8f13b2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.133058] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950633, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.145131] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52974097-91ae-afa9-867e-2454df1a0e36, 'name': SearchDatastore_Task, 'duration_secs': 0.015315} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.145131] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1659.145131] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 1ad97ed0-2a84-4783-8511-e0f6b24861bd/1ad97ed0-2a84-4783-8511-e0f6b24861bd.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1659.145131] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1659.145131] env[63024]: value = "task-1950634" [ 1659.145131] env[63024]: _type = "Task" [ 1659.145131] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.145392] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98b55d3d-2f27-496f-b654-635e9010dcd6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.156953] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950634, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.158453] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1659.158453] env[63024]: value = "task-1950635" [ 1659.158453] env[63024]: _type = "Task" [ 1659.158453] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.170272] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950635, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.350191] env[63024]: DEBUG nova.compute.manager [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1659.579926] env[63024]: DEBUG nova.compute.utils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1659.585136] env[63024]: DEBUG nova.compute.manager [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1659.585136] env[63024]: DEBUG nova.network.neutron [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1659.641425] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950633, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07876} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.641703] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1659.642517] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b00b7058-aa9b-47f6-bf42-6432476e37c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.669864] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Reconfiguring VM instance instance-0000001c to attach disk [datastore1] 00e925a1-9b79-46e2-b7f7-c0b63e1e72df/00e925a1-9b79-46e2-b7f7-c0b63e1e72df.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1659.673993] env[63024]: DEBUG nova.policy [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1789ba92a3a44545a2bafb639692e8af', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eae1647405bf418ea6abde9723b2c895', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1659.679247] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54f85a18-6621-4f2b-b980-4bdbcd976e31 {{(pid=63024) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.701158] env[63024]: DEBUG nova.compute.manager [req-910c02fb-1838-4c5d-a27a-25c7054759a1 req-ef011505-a5ba-4dda-b839-03e806b34288 service nova] [instance: 610dd030-5080-498a-8744-b1411297d70d] Received event network-vif-plugged-9a7cae93-d7ad-4e7c-b18b-00e7e749299e {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1659.701158] env[63024]: DEBUG oslo_concurrency.lockutils [req-910c02fb-1838-4c5d-a27a-25c7054759a1 req-ef011505-a5ba-4dda-b839-03e806b34288 service nova] Acquiring lock "610dd030-5080-498a-8744-b1411297d70d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1659.701158] env[63024]: DEBUG oslo_concurrency.lockutils [req-910c02fb-1838-4c5d-a27a-25c7054759a1 req-ef011505-a5ba-4dda-b839-03e806b34288 service nova] Lock "610dd030-5080-498a-8744-b1411297d70d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1659.701158] env[63024]: DEBUG oslo_concurrency.lockutils [req-910c02fb-1838-4c5d-a27a-25c7054759a1 req-ef011505-a5ba-4dda-b839-03e806b34288 service nova] Lock "610dd030-5080-498a-8744-b1411297d70d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1659.701158] env[63024]: DEBUG nova.compute.manager [req-910c02fb-1838-4c5d-a27a-25c7054759a1 req-ef011505-a5ba-4dda-b839-03e806b34288 service nova] [instance: 610dd030-5080-498a-8744-b1411297d70d] No waiting events found dispatching network-vif-plugged-9a7cae93-d7ad-4e7c-b18b-00e7e749299e {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1659.701330] env[63024]: WARNING nova.compute.manager [req-910c02fb-1838-4c5d-a27a-25c7054759a1 req-ef011505-a5ba-4dda-b839-03e806b34288 service nova] [instance: 610dd030-5080-498a-8744-b1411297d70d] Received unexpected event network-vif-plugged-9a7cae93-d7ad-4e7c-b18b-00e7e749299e for instance with vm_state building and task_state spawning. [ 1659.701330] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950634, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.709885] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950635, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506391} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1659.713816] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 1ad97ed0-2a84-4783-8511-e0f6b24861bd/1ad97ed0-2a84-4783-8511-e0f6b24861bd.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1659.714050] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1659.714382] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Waiting for the task: (returnval){ [ 1659.714382] env[63024]: value = "task-1950637" [ 1659.714382] env[63024]: _type = "Task" [ 1659.714382] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.714784] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4063eb27-1c46-4e43-a8f4-5f96aa40baa3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1659.727893] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950637, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.734322] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1659.734322] env[63024]: value = "task-1950638" [ 1659.734322] env[63024]: _type = "Task" [ 1659.734322] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1659.744043] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950638, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1659.874151] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1660.007276] env[63024]: DEBUG nova.network.neutron [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Successfully created port: 36bc0224-4621-4b20-b039-244da00236ac {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1660.088956] env[63024]: DEBUG nova.compute.manager [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1660.150319] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd52e84-d2db-4d55-8b22-1ff7078bc042 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.166451] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950634, 'name': CreateSnapshot_Task, 'duration_secs': 0.865158} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.171087] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1660.171984] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3667a3a9-84b6-4d94-ae2d-54c862ed396b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.179214] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb99a791-4a87-4a75-b4b2-f597f01a0487 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.231653] env[63024]: DEBUG nova.network.neutron [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Successfully updated port: 9a7cae93-d7ad-4e7c-b18b-00e7e749299e {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1660.243732] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1660.247411] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c9482a3-c7e0-4123-a068-f3aefcc0e6e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.251220] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d24f1088-0d0b-4431-b141-216f84acaeaf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.254639] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "refresh_cache-610dd030-5080-498a-8744-b1411297d70d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1660.254809] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "refresh_cache-610dd030-5080-498a-8744-b1411297d70d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1660.254962] env[63024]: DEBUG nova.network.neutron [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1660.268224] env[63024]: DEBUG oslo_vmware.api [None 
req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950638, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.195687} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.273166] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8432cd98-45d0-4ffc-8d99-252d6ff2806f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.277840] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1660.278510] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950637, 'name': ReconfigVM_Task, 'duration_secs': 0.3089} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.278783] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1660.278783] env[63024]: value = "task-1950639" [ 1660.278783] env[63024]: _type = "Task" [ 1660.278783] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.279494] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b0ae6ed-01c8-4999-b16c-49da831b073b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.282157] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Reconfigured VM instance instance-0000001c to attach disk [datastore1] 00e925a1-9b79-46e2-b7f7-c0b63e1e72df/00e925a1-9b79-46e2-b7f7-c0b63e1e72df.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1660.283737] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d83b0ad-55df-4d45-a124-2212fc56797a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.298214] env[63024]: DEBUG nova.compute.provider_tree [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1660.327641] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Reconfiguring VM instance 
instance-0000001d to attach disk [datastore1] 1ad97ed0-2a84-4783-8511-e0f6b24861bd/1ad97ed0-2a84-4783-8511-e0f6b24861bd.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1660.334021] env[63024]: DEBUG nova.scheduler.client.report [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1660.337829] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db28fa45-da0c-4835-adde-acecf806efe1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.360695] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Waiting for the task: (returnval){ [ 1660.360695] env[63024]: value = "task-1950640" [ 1660.360695] env[63024]: _type = "Task" [ 1660.360695] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.360926] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950639, 'name': CloneVM_Task} progress is 23%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.369937] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1660.369937] env[63024]: value = "task-1950641" [ 1660.369937] env[63024]: _type = "Task" [ 1660.369937] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.377341] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950640, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.386635] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950641, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.795345] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950639, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.802950] env[63024]: DEBUG nova.network.neutron [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1660.862377] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.791s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1660.864741] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 27.304s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1660.884097] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950640, 'name': Rename_Task, 'duration_secs': 0.151236} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1660.884807] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1660.885108] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f788cb2-1815-4efc-8969-e3b22230d24d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1660.893753] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950641, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1660.894777] env[63024]: INFO nova.scheduler.client.report [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Deleted allocations for instance e2138192-14e0-43d2-9d19-9820747d7217 [ 1660.903799] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Waiting for the task: (returnval){ [ 1660.903799] env[63024]: value = "task-1950642" [ 1660.903799] env[63024]: _type = "Task" [ 1660.903799] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1660.911771] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950642, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.047303] env[63024]: DEBUG nova.network.neutron [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Updating instance_info_cache with network_info: [{"id": "9a7cae93-d7ad-4e7c-b18b-00e7e749299e", "address": "fa:16:3e:01:9d:f0", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a7cae93-d7", "ovs_interfaceid": "9a7cae93-d7ad-4e7c-b18b-00e7e749299e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1661.104024] env[63024]: DEBUG nova.compute.manager [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1661.127887] env[63024]: DEBUG nova.virt.hardware [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1661.128137] env[63024]: DEBUG nova.virt.hardware [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1661.128360] env[63024]: DEBUG nova.virt.hardware [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1661.128557] env[63024]: DEBUG nova.virt.hardware [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1661.128708] env[63024]: DEBUG nova.virt.hardware [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1661.128876] env[63024]: DEBUG nova.virt.hardware [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1661.129131] env[63024]: DEBUG nova.virt.hardware [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1661.129275] env[63024]: DEBUG nova.virt.hardware [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1661.129444] env[63024]: DEBUG nova.virt.hardware [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 
tempest-ServersTestJSON-490274306-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1661.129607] env[63024]: DEBUG nova.virt.hardware [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1661.129920] env[63024]: DEBUG nova.virt.hardware [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1661.131141] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d28269d-c7b3-4d51-b09c-5122cf3ab106 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.138925] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-352219db-3a47-43a0-be4b-8f92fd87bfc0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.296194] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950639, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.381540] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950641, 'name': ReconfigVM_Task, 'duration_secs': 0.795848} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.381540] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Reconfigured VM instance instance-0000001d to attach disk [datastore1] 1ad97ed0-2a84-4783-8511-e0f6b24861bd/1ad97ed0-2a84-4783-8511-e0f6b24861bd.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1661.381947] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97ab0d6e-84c6-43f4-aaac-e573132ed534 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.392022] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1661.392022] env[63024]: value = "task-1950644" [ 1661.392022] env[63024]: _type = "Task" [ 1661.392022] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.404155] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950644, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.407367] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6b228a5e-aba2-447e-b46a-ca69a68fc19f tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "e2138192-14e0-43d2-9d19-9820747d7217" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.033s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.415249] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "e2138192-14e0-43d2-9d19-9820747d7217" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 25.300s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.415249] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "e2138192-14e0-43d2-9d19-9820747d7217-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.415249] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "e2138192-14e0-43d2-9d19-9820747d7217-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.415249] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "e2138192-14e0-43d2-9d19-9820747d7217-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.415926] env[63024]: INFO nova.compute.manager [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Terminating instance [ 1661.425604] env[63024]: DEBUG oslo_vmware.api [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950642, 'name': PowerOnVM_Task, 'duration_secs': 0.496408} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.426649] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1661.426649] env[63024]: INFO nova.compute.manager [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Took 9.35 seconds to spawn the instance on the hypervisor. [ 1661.426649] env[63024]: DEBUG nova.compute.manager [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1661.427591] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1306d45d-dc17-4ccf-b143-1230d8370ac3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.507858] env[63024]: DEBUG nova.compute.manager [req-17cf9db7-6a0b-4d47-87ee-239f6abd5110 req-7f17331a-f62a-40eb-87e8-196f8497c25d service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Received event network-vif-plugged-36bc0224-4621-4b20-b039-244da00236ac {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1661.508111] env[63024]: DEBUG oslo_concurrency.lockutils [req-17cf9db7-6a0b-4d47-87ee-239f6abd5110 req-7f17331a-f62a-40eb-87e8-196f8497c25d service nova] Acquiring lock "8a826350-0fee-409d-a3fc-260d7d43bdf6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1661.508340] env[63024]: DEBUG oslo_concurrency.lockutils [req-17cf9db7-6a0b-4d47-87ee-239f6abd5110 req-7f17331a-f62a-40eb-87e8-196f8497c25d service nova] Lock "8a826350-0fee-409d-a3fc-260d7d43bdf6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1661.508512] env[63024]: DEBUG oslo_concurrency.lockutils [req-17cf9db7-6a0b-4d47-87ee-239f6abd5110 req-7f17331a-f62a-40eb-87e8-196f8497c25d service nova] Lock "8a826350-0fee-409d-a3fc-260d7d43bdf6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1661.508695] env[63024]: DEBUG nova.compute.manager [req-17cf9db7-6a0b-4d47-87ee-239f6abd5110 req-7f17331a-f62a-40eb-87e8-196f8497c25d service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] No waiting events found dispatching network-vif-plugged-36bc0224-4621-4b20-b039-244da00236ac {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1661.508893] env[63024]: WARNING nova.compute.manager [req-17cf9db7-6a0b-4d47-87ee-239f6abd5110 req-7f17331a-f62a-40eb-87e8-196f8497c25d service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Received unexpected 
event network-vif-plugged-36bc0224-4621-4b20-b039-244da00236ac for instance with vm_state building and task_state spawning. [ 1661.551029] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "refresh_cache-610dd030-5080-498a-8744-b1411297d70d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1661.551364] env[63024]: DEBUG nova.compute.manager [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Instance network_info: |[{"id": "9a7cae93-d7ad-4e7c-b18b-00e7e749299e", "address": "fa:16:3e:01:9d:f0", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a7cae93-d7", "ovs_interfaceid": "9a7cae93-d7ad-4e7c-b18b-00e7e749299e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1661.552668] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:9d:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a7cae93-d7ad-4e7c-b18b-00e7e749299e', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1661.560531] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Creating folder: Project (1886be852b01400aaf7a31c8fe5d4d7a). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1661.560639] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a7f4c3e8-57ba-4a26-8626-753f0d81bb8d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.571659] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Created folder: Project (1886be852b01400aaf7a31c8fe5d4d7a) in parent group-v401959. [ 1661.571864] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Creating folder: Instances. Parent ref: group-v402051. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1661.572107] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2a5b279e-5289-4cdb-8d5b-bfa6f61cdae8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.580755] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Created folder: Instances in parent group-v402051. [ 1661.581026] env[63024]: DEBUG oslo.service.loopingcall [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1661.581353] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 610dd030-5080-498a-8744-b1411297d70d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1661.583946] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-830b181a-a405-4a5d-9bff-494e780615f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.601655] env[63024]: DEBUG nova.network.neutron [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Successfully updated port: 36bc0224-4621-4b20-b039-244da00236ac {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1661.608699] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1661.608699] env[63024]: value = "task-1950647" [ 1661.608699] env[63024]: _type = "Task" [ 1661.608699] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1661.622944] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950647, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1661.795528] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950639, 'name': CloneVM_Task, 'duration_secs': 1.448941} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1661.795808] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Created linked-clone VM from snapshot [ 1661.796570] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328d2731-cecd-4275-a351-c047ab5171b3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.809514] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Uploading image 1f6dc89a-45dd-4296-a06c-4feac41636c3 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1661.828073] env[63024]: DEBUG nova.compute.manager [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] [instance: 610dd030-5080-498a-8744-b1411297d70d] Received event network-changed-9a7cae93-d7ad-4e7c-b18b-00e7e749299e {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1661.828266] env[63024]: DEBUG nova.compute.manager [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] [instance: 610dd030-5080-498a-8744-b1411297d70d] Refreshing instance network info cache due to event network-changed-9a7cae93-d7ad-4e7c-b18b-00e7e749299e. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1661.828572] env[63024]: DEBUG oslo_concurrency.lockutils [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] Acquiring lock "refresh_cache-610dd030-5080-498a-8744-b1411297d70d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.828662] env[63024]: DEBUG oslo_concurrency.lockutils [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] Acquired lock "refresh_cache-610dd030-5080-498a-8744-b1411297d70d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.828938] env[63024]: DEBUG nova.network.neutron [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] [instance: 610dd030-5080-498a-8744-b1411297d70d] Refreshing network info cache for port 9a7cae93-d7ad-4e7c-b18b-00e7e749299e {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1661.852566] env[63024]: DEBUG oslo_vmware.rw_handles [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1661.852566] env[63024]: value = "vm-402050" [ 1661.852566] env[63024]: _type = "VirtualMachine" [ 1661.852566] env[63024]: }. 
{{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1661.852870] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-442241c2-d930-46ef-bc2b-a6fa67d69936 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.860217] env[63024]: DEBUG oslo_vmware.rw_handles [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lease: (returnval){ [ 1661.860217] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527b7a63-7d64-9f94-ba94-d4d9b2b48ac4" [ 1661.860217] env[63024]: _type = "HttpNfcLease" [ 1661.860217] env[63024]: } obtained for exporting VM: (result){ [ 1661.860217] env[63024]: value = "vm-402050" [ 1661.860217] env[63024]: _type = "VirtualMachine" [ 1661.860217] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1661.860731] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the lease: (returnval){ [ 1661.860731] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527b7a63-7d64-9f94-ba94-d4d9b2b48ac4" [ 1661.860731] env[63024]: _type = "HttpNfcLease" [ 1661.860731] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1661.874090] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1661.874090] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527b7a63-7d64-9f94-ba94-d4d9b2b48ac4" [ 1661.874090] env[63024]: _type = "HttpNfcLease" [ 1661.874090] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1661.874665] env[63024]: DEBUG oslo_vmware.rw_handles [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1661.874665] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527b7a63-7d64-9f94-ba94-d4d9b2b48ac4" [ 1661.874665] env[63024]: _type = "HttpNfcLease" [ 1661.874665] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1661.875284] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54af903a-205a-4d3c-9e70-f9561d342668 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.879243] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab3536f9-6c8a-4168-a23a-44aada027abc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.886304] env[63024]: DEBUG oslo_vmware.rw_handles [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526987df-4e38-9eb4-d7c2-8ab81847f04a/disk-0.vmdk from lease info. 
{{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1661.886528] env[63024]: DEBUG oslo_vmware.rw_handles [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526987df-4e38-9eb4-d7c2-8ab81847f04a/disk-0.vmdk for reading. {{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1661.890673] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f70903e-d171-407e-ac1c-c687930d4b64 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1661.950564] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1661.950825] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquired lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1661.951079] env[63024]: DEBUG nova.network.neutron [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1662.005918] env[63024]: INFO nova.compute.manager [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Took 39.76 seconds to build instance. 
[ 1662.008192] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.008427] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.008619] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1662.008970] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1662.008970] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.013603] env[63024]: INFO nova.compute.manager [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Terminating instance [ 1662.019024] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61b9f8e-bd17-4737-af80-3a8ebdbf4a56 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.022243] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950644, 'name': Rename_Task, 'duration_secs': 0.346472} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.023696] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4baad158-3d56-4fde-9770-629b80f74527 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.025069] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1662.025810] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b734937-c3c5-44a8-8e42-044a569e1cf9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.031633] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4336861b-dae5-4c18-88d1-63ed2afaefbf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.037177] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1662.037177] env[63024]: value = "task-1950649" [ 1662.037177] env[63024]: _type = "Task" [ 1662.037177] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.048932] env[63024]: DEBUG nova.compute.provider_tree [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1662.054871] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950649, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.105194] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Acquiring lock "refresh_cache-8a826350-0fee-409d-a3fc-260d7d43bdf6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1662.105194] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Acquired lock "refresh_cache-8a826350-0fee-409d-a3fc-260d7d43bdf6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1662.105194] env[63024]: DEBUG nova.network.neutron [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1662.118520] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950647, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.459064] env[63024]: DEBUG nova.compute.utils [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Can not refresh info_cache because instance was not found {{(pid=63024) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1662.472588] env[63024]: DEBUG nova.compute.manager [None req-1b63b3a1-4b4b-438b-ae3f-a856ca023800 tempest-ServerDiagnosticsTest-2127734892 tempest-ServerDiagnosticsTest-2127734892-project-admin] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1662.474419] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3fb47e-03cc-4acc-a249-49c397fdb551 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.485177] env[63024]: INFO nova.compute.manager [None req-1b63b3a1-4b4b-438b-ae3f-a856ca023800 tempest-ServerDiagnosticsTest-2127734892 tempest-ServerDiagnosticsTest-2127734892-project-admin] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Retrieving diagnostics [ 1662.486104] env[63024]: DEBUG nova.network.neutron [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1662.492019] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be3cf805-8782-4050-b1e4-378097a5ac61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.525658] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3666f641-b513-4c2f-b0e5-f2a1fb2cd8d4 tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Lock "00e925a1-9b79-46e2-b7f7-c0b63e1e72df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.024s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1662.530955] env[63024]: DEBUG nova.compute.manager [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1662.531451] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1662.533116] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ce5498-f562-446b-a5d5-69a0642f75de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.544872] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1662.545459] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04d10cab-45cb-4b57-8394-44c0b8157857 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.550711] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950649, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.551922] env[63024]: DEBUG nova.scheduler.client.report [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1662.590687] env[63024]: DEBUG nova.network.neutron [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1662.621849] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950647, 'name': CreateVM_Task, 'duration_secs': 0.593371} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1662.622075] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 610dd030-5080-498a-8744-b1411297d70d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1662.622716] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1662.623090] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1662.623601] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1662.624439] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfffa9e5-9e52-4c37-a30f-036e76be5241 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.630157] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1662.630157] env[63024]: value = 
"session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527e3284-006f-7bea-4363-2007f207ec62" [ 1662.630157] env[63024]: _type = "Task" [ 1662.630157] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.635104] env[63024]: DEBUG nova.network.neutron [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] [instance: 610dd030-5080-498a-8744-b1411297d70d] Updated VIF entry in instance network info cache for port 9a7cae93-d7ad-4e7c-b18b-00e7e749299e. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1662.635444] env[63024]: DEBUG nova.network.neutron [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] [instance: 610dd030-5080-498a-8744-b1411297d70d] Updating instance_info_cache with network_info: [{"id": "9a7cae93-d7ad-4e7c-b18b-00e7e749299e", "address": "fa:16:3e:01:9d:f0", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a7cae93-d7", "ovs_interfaceid": "9a7cae93-d7ad-4e7c-b18b-00e7e749299e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1662.643835] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527e3284-006f-7bea-4363-2007f207ec62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.659127] env[63024]: DEBUG nova.network.neutron [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1662.708336] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1662.708733] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1662.709139] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Deleting the datastore file [datastore1] ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1662.712735] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f20bfe3c-8f72-4a88-b020-d9911ef7bce1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1662.719619] env[63024]: DEBUG oslo_vmware.api [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1662.719619] env[63024]: value = "task-1950651" [ 1662.719619] env[63024]: _type = "Task" [ 1662.719619] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1662.728811] env[63024]: DEBUG oslo_vmware.api [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950651, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1662.837073] env[63024]: DEBUG nova.network.neutron [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Updating instance_info_cache with network_info: [{"id": "36bc0224-4621-4b20-b039-244da00236ac", "address": "fa:16:3e:28:b4:46", "network": {"id": "ab62b987-c074-462f-ab8f-d851463f3ed7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1458114320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eae1647405bf418ea6abde9723b2c895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604c9724-b4ef-4393-a76e-eb4a2b510796", "external-id": "nsx-vlan-transportzone-909", "segmentation_id": 909, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36bc0224-46", "ovs_interfaceid": "36bc0224-4621-4b20-b039-244da00236ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.047937] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950649, 'name': PowerOnVM_Task} progress is 86%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.093721] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Releasing lock "refresh_cache-e2138192-14e0-43d2-9d19-9820747d7217" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.094205] env[63024]: DEBUG nova.compute.manager [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1663.094593] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1663.094882] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f9d4738c-64b1-44cb-9771-4c955c9f8808 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.105981] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178f54a5-656a-48b4-95ed-61752b07dba0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.142953] env[63024]: DEBUG oslo_concurrency.lockutils [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] Releasing lock "refresh_cache-610dd030-5080-498a-8744-b1411297d70d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.143352] env[63024]: DEBUG nova.compute.manager [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Received event network-changed-209c547a-fef6-4e81-9221-59b72099faa5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1663.143601] env[63024]: DEBUG nova.compute.manager [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Refreshing instance network info cache due to event network-changed-209c547a-fef6-4e81-9221-59b72099faa5. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1663.143909] env[63024]: DEBUG oslo_concurrency.lockutils [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] Acquiring lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.144171] env[63024]: DEBUG oslo_concurrency.lockutils [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] Acquired lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.144394] env[63024]: DEBUG nova.network.neutron [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Refreshing network info cache for port 209c547a-fef6-4e81-9221-59b72099faa5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1663.145943] env[63024]: WARNING nova.virt.vmwareapi.vmops [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e2138192-14e0-43d2-9d19-9820747d7217 could not be found. 
[ 1663.146123] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1663.146350] env[63024]: INFO nova.compute.manager [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1663.146656] env[63024]: DEBUG oslo.service.loopingcall [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1663.150530] env[63024]: DEBUG nova.compute.manager [-] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1663.150530] env[63024]: DEBUG nova.network.neutron [-] [instance: e2138192-14e0-43d2-9d19-9820747d7217] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1663.159203] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527e3284-006f-7bea-4363-2007f207ec62, 'name': SearchDatastore_Task, 'duration_secs': 0.017753} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.159885] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.160042] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1663.160317] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.160605] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.160876] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1663.161576] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3da27615-c12c-4c84-bb80-607b9212fea3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.171838] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1663.172157] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1663.173914] env[63024]: DEBUG nova.network.neutron [-] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1663.174961] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8f8f285-61d8-436f-9a1b-c6f695772d71 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.185863] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1663.185863] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a06757-6352-9ba6-6146-3181c8e7201a" [ 1663.185863] env[63024]: _type = "Task" [ 1663.185863] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.196876] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a06757-6352-9ba6-6146-3181c8e7201a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.231148] env[63024]: DEBUG oslo_vmware.api [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950651, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217767} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.231764] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1663.233788] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1663.233788] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1663.233788] env[63024]: INFO nova.compute.manager [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Took 0.70 seconds to destroy the instance on the hypervisor. [ 1663.233788] env[63024]: DEBUG oslo.service.loopingcall [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1663.233788] env[63024]: DEBUG nova.compute.manager [-] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1663.233788] env[63024]: DEBUG nova.network.neutron [-] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1663.326257] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "b629b4f8-f79f-4361-b78c-8705a6888a9e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.326533] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "b629b4f8-f79f-4361-b78c-8705a6888a9e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.326735] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "b629b4f8-f79f-4361-b78c-8705a6888a9e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.326918] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "b629b4f8-f79f-4361-b78c-8705a6888a9e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.327104] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "b629b4f8-f79f-4361-b78c-8705a6888a9e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.329806] env[63024]: INFO nova.compute.manager [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Terminating instance [ 1663.340034] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Releasing lock "refresh_cache-8a826350-0fee-409d-a3fc-260d7d43bdf6" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.340426] env[63024]: DEBUG nova.compute.manager [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Instance network_info: |[{"id": "36bc0224-4621-4b20-b039-244da00236ac", "address": "fa:16:3e:28:b4:46", "network": {"id": "ab62b987-c074-462f-ab8f-d851463f3ed7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1458114320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eae1647405bf418ea6abde9723b2c895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604c9724-b4ef-4393-a76e-eb4a2b510796", "external-id": "nsx-vlan-transportzone-909", "segmentation_id": 909, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36bc0224-46", "ovs_interfaceid": "36bc0224-4621-4b20-b039-244da00236ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1663.341233] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:b4:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '604c9724-b4ef-4393-a76e-eb4a2b510796', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '36bc0224-4621-4b20-b039-244da00236ac', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1663.352300] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Creating folder: Project (eae1647405bf418ea6abde9723b2c895). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1663.352300] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-490419d1-1a57-472b-b62a-92ed7f0b129b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.360469] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Created folder: Project (eae1647405bf418ea6abde9723b2c895) in parent group-v401959. [ 1663.360576] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Creating folder: Instances. Parent ref: group-v402054. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1663.360837] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a79c1499-bdd7-46cd-b0a7-a5917f0e3646 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.376561] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Created folder: Instances in parent group-v402054. [ 1663.377472] env[63024]: DEBUG oslo.service.loopingcall [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1663.377472] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1663.377472] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98451ef1-70cd-4716-aa3a-980c3e40ad7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.400643] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1663.400643] env[63024]: value = "task-1950655" [ 1663.400643] env[63024]: _type = "Task" [ 1663.400643] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.408583] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950655, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.550042] env[63024]: DEBUG oslo_vmware.api [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950649, 'name': PowerOnVM_Task, 'duration_secs': 1.085748} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.554026] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1663.556039] env[63024]: INFO nova.compute.manager [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Took 8.81 seconds to spawn the instance on the hypervisor. 
[ 1663.556039] env[63024]: DEBUG nova.compute.manager [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1663.556039] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ed0af3-c143-4d84-9d96-e6db6fec9841 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.563305] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.699s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.569755] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 28.854s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.570157] env[63024]: DEBUG nova.objects.instance [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63024) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1663.684801] env[63024]: DEBUG nova.network.neutron [-] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.704820] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a06757-6352-9ba6-6146-3181c8e7201a, 'name': SearchDatastore_Task, 'duration_secs': 0.017225} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.706188] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-534406bd-657d-4aab-855e-b549b456ffaa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.716773] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1663.716773] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a91478-adf8-a0c2-c9f9-f2c9f9aed245" [ 1663.716773] env[63024]: _type = "Task" [ 1663.716773] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.726435] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a91478-adf8-a0c2-c9f9-f2c9f9aed245, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.765407] env[63024]: DEBUG nova.compute.manager [req-54b005bc-45d0-4d50-86d2-23c6c93d456e req-16830189-559c-43bb-af95-32e4354a3f11 service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Received event network-changed-36bc0224-4621-4b20-b039-244da00236ac {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1663.765407] env[63024]: DEBUG nova.compute.manager [req-54b005bc-45d0-4d50-86d2-23c6c93d456e req-16830189-559c-43bb-af95-32e4354a3f11 service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Refreshing instance network info cache due to event network-changed-36bc0224-4621-4b20-b039-244da00236ac. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1663.765407] env[63024]: DEBUG oslo_concurrency.lockutils [req-54b005bc-45d0-4d50-86d2-23c6c93d456e req-16830189-559c-43bb-af95-32e4354a3f11 service nova] Acquiring lock "refresh_cache-8a826350-0fee-409d-a3fc-260d7d43bdf6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.765407] env[63024]: DEBUG oslo_concurrency.lockutils [req-54b005bc-45d0-4d50-86d2-23c6c93d456e req-16830189-559c-43bb-af95-32e4354a3f11 service nova] Acquired lock "refresh_cache-8a826350-0fee-409d-a3fc-260d7d43bdf6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.765775] env[63024]: DEBUG nova.network.neutron [req-54b005bc-45d0-4d50-86d2-23c6c93d456e req-16830189-559c-43bb-af95-32e4354a3f11 service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Refreshing network info cache for port 36bc0224-4621-4b20-b039-244da00236ac {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1663.833920] env[63024]: DEBUG nova.compute.manager [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1663.834393] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1663.835555] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d111ae8a-100b-4ae8-be94-1e2d3b0e38a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.847311] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1663.847859] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e597c9dd-843f-4135-9df5-4696db3b0643 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.854605] env[63024]: DEBUG oslo_vmware.api [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1663.854605] env[63024]: value = "task-1950656" [ 1663.854605] env[63024]: _type = "Task" [ 1663.854605] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.864896] env[63024]: DEBUG oslo_vmware.api [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950656, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1663.913115] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950655, 'name': CreateVM_Task, 'duration_secs': 0.425817} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.915294] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1663.916494] env[63024]: DEBUG nova.compute.manager [req-489fe17c-752e-4d88-abfd-279e29196298 req-2708ed75-8fac-4e51-aef6-5354c608e1d8 service nova] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Received event network-vif-deleted-72b9a842-8d9f-4bc9-945a-1b144bf0e58c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1663.916755] env[63024]: INFO nova.compute.manager [req-489fe17c-752e-4d88-abfd-279e29196298 req-2708ed75-8fac-4e51-aef6-5354c608e1d8 service nova] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Neutron deleted interface 72b9a842-8d9f-4bc9-945a-1b144bf0e58c; detaching it from the instance and deleting it from the info cache [ 1663.917048] env[63024]: DEBUG nova.network.neutron [req-489fe17c-752e-4d88-abfd-279e29196298 req-2708ed75-8fac-4e51-aef6-5354c608e1d8 service nova] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.920240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1663.920240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1663.920240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1663.920240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Acquiring lock "00e925a1-9b79-46e2-b7f7-c0b63e1e72df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.920609] env[63024]: DEBUG oslo_concurrency.lockutils [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Lock "00e925a1-9b79-46e2-b7f7-c0b63e1e72df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.920661] env[63024]: DEBUG oslo_concurrency.lockutils [None req-677364b9-353a-44ed-b88c-6aba81b1a05a 
tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Acquiring lock "00e925a1-9b79-46e2-b7f7-c0b63e1e72df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1663.920913] env[63024]: DEBUG oslo_concurrency.lockutils [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Lock "00e925a1-9b79-46e2-b7f7-c0b63e1e72df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1663.922726] env[63024]: DEBUG oslo_concurrency.lockutils [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Lock "00e925a1-9b79-46e2-b7f7-c0b63e1e72df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1663.923089] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e99a1580-0d63-4e97-8ccd-a605e613aabc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1663.926227] env[63024]: INFO nova.compute.manager [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Terminating instance [ 1663.933689] env[63024]: DEBUG nova.network.neutron [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Updated VIF entry in instance network info cache for port 209c547a-fef6-4e81-9221-59b72099faa5. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1663.934107] env[63024]: DEBUG nova.network.neutron [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Updating instance_info_cache with network_info: [{"id": "209c547a-fef6-4e81-9221-59b72099faa5", "address": "fa:16:3e:f0:17:5b", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap209c547a-fe", "ovs_interfaceid": "209c547a-fef6-4e81-9221-59b72099faa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1663.936676] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Waiting for the task: (returnval){ [ 1663.936676] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52da33fd-dfcb-adbb-4f8c-6e5b74213d4f" [ 1663.936676] env[63024]: _type = "Task" [ 1663.936676] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1663.948903] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52da33fd-dfcb-adbb-4f8c-6e5b74213d4f, 'name': SearchDatastore_Task, 'duration_secs': 0.013849} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1663.949227] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1663.949462] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1663.949688] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1664.085849] env[63024]: INFO nova.compute.manager [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Took 38.82 seconds to build instance. [ 1664.139604] env[63024]: INFO nova.scheduler.client.report [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Deleted allocation for migration 85a4ec44-899c-4937-b93d-0eaabd8ff03f [ 1664.190381] env[63024]: INFO nova.compute.manager [-] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Took 1.04 seconds to deallocate network for instance. [ 1664.198144] env[63024]: DEBUG nova.network.neutron [-] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.227723] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a91478-adf8-a0c2-c9f9-f2c9f9aed245, 'name': SearchDatastore_Task, 'duration_secs': 0.021454} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.231024] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1664.231024] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 610dd030-5080-498a-8744-b1411297d70d/610dd030-5080-498a-8744-b1411297d70d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1664.231024] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1664.231024] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1664.231387] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c87373e2-c3cc-49eb-989a-82f4d0cc6b96 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.231387] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a268779-a2af-40d1-9ebe-d3a6ac40ff2a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.240258] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1664.240258] env[63024]: value = "task-1950657" [ 1664.240258] env[63024]: _type = "Task" [ 1664.240258] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.244564] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1664.244753] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1664.245920] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de2f9095-82c2-4ae5-a91c-f4652644d349 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.252727] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950657, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.256569] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Waiting for the task: (returnval){ [ 1664.256569] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52287255-f49e-c9f8-c75c-37060c481683" [ 1664.256569] env[63024]: _type = "Task" [ 1664.256569] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.265700] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52287255-f49e-c9f8-c75c-37060c481683, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.369379] env[63024]: DEBUG oslo_vmware.api [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950656, 'name': PowerOffVM_Task, 'duration_secs': 0.307235} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.369813] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1664.370355] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1664.370355] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bae27228-8145-4dd5-af90-e9567d6753d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.423889] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fe3afc4f-9a8a-440b-8886-9a8e76a6b075 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.433959] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c82ff09-50a5-40ee-853e-be8e5678fa1d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.447582] env[63024]: DEBUG oslo_concurrency.lockutils [req-b8dce721-105c-4a98-b7b5-5539b4e0d79e req-21cc4945-db15-45e5-b376-d6f7a70a9d90 service nova] Releasing lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1664.448378] env[63024]: DEBUG nova.compute.manager [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1664.448608] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1664.449517] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3637e0f9-3ddd-4f23-b206-4b4e1fbebad8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.466041] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1664.481217] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c615b755-06d5-485e-9f7e-613611c3c37d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.483238] env[63024]: DEBUG nova.compute.manager [req-489fe17c-752e-4d88-abfd-279e29196298 req-2708ed75-8fac-4e51-aef6-5354c608e1d8 service nova] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Detach interface failed, port_id=72b9a842-8d9f-4bc9-945a-1b144bf0e58c, reason: Instance ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1664.483786] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1664.484028] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1664.484250] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Deleting the datastore file [datastore1] b629b4f8-f79f-4361-b78c-8705a6888a9e {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1664.484497] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c26d0126-130d-4c3d-9134-ccc862f2d660 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.491056] env[63024]: DEBUG oslo_vmware.api [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Waiting for the task: (returnval){ [ 1664.491056] env[63024]: value = "task-1950659" [ 1664.491056] env[63024]: _type = "Task" [ 1664.491056] env[63024]: 
} to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.492447] env[63024]: DEBUG oslo_vmware.api [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for the task: (returnval){ [ 1664.492447] env[63024]: value = "task-1950660" [ 1664.492447] env[63024]: _type = "Task" [ 1664.492447] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.506828] env[63024]: DEBUG oslo_vmware.api [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950659, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.510373] env[63024]: DEBUG oslo_vmware.api [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950660, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.532728] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000fcdec-dc0c-40e9-95e2-459236a962de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.540720] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4d0bca0e-f424-4486-9436-f2c098d93b25 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Suspending the VM {{(pid=63024) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1664.541074] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-edbb7c80-39a8-4677-a297-7ce2d1a7926c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.543968] env[63024]: DEBUG nova.network.neutron [req-54b005bc-45d0-4d50-86d2-23c6c93d456e req-16830189-559c-43bb-af95-32e4354a3f11 service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Updated VIF entry in instance network info cache for port 36bc0224-4621-4b20-b039-244da00236ac. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1664.544423] env[63024]: DEBUG nova.network.neutron [req-54b005bc-45d0-4d50-86d2-23c6c93d456e req-16830189-559c-43bb-af95-32e4354a3f11 service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Updating instance_info_cache with network_info: [{"id": "36bc0224-4621-4b20-b039-244da00236ac", "address": "fa:16:3e:28:b4:46", "network": {"id": "ab62b987-c074-462f-ab8f-d851463f3ed7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1458114320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eae1647405bf418ea6abde9723b2c895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604c9724-b4ef-4393-a76e-eb4a2b510796", "external-id": "nsx-vlan-transportzone-909", "segmentation_id": 909, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36bc0224-46", "ovs_interfaceid": "36bc0224-4621-4b20-b039-244da00236ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1664.550952] env[63024]: DEBUG oslo_vmware.api [None req-4d0bca0e-f424-4486-9436-f2c098d93b25 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1664.550952] env[63024]: value = "task-1950661" [ 1664.550952] env[63024]: _type = "Task" [ 1664.550952] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.563257] env[63024]: DEBUG oslo_vmware.api [None req-4d0bca0e-f424-4486-9436-f2c098d93b25 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950661, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.583968] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fa57b7f8-56b8-491a-bbc8-689b86d27164 tempest-ServersAdmin275Test-964372884 tempest-ServersAdmin275Test-964372884-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.585564] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.862s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1664.587988] env[63024]: INFO nova.compute.claims [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1664.591974] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5c45c070-464d-4a9b-936d-f6f7ad9f2764 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "1ad97ed0-2a84-4783-8511-e0f6b24861bd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.862s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.648063] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2912dc9-5a6f-41f1-b223-953eb8bd64f9 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 35.060s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.698863] env[63024]: INFO nova.compute.manager [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Instance disappeared during terminate [ 1664.698863] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cdec4f1c-4040-4138-93d8-90d551e39ffe tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "e2138192-14e0-43d2-9d19-9820747d7217" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.286s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1664.701507] env[63024]: INFO nova.compute.manager [-] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Took 1.47 seconds to deallocate network for instance. [ 1664.756678] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950657, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1664.774776] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52287255-f49e-c9f8-c75c-37060c481683, 'name': SearchDatastore_Task, 'duration_secs': 0.010596} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1664.776312] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbb4c49a-cb32-4f60-8bda-bcd36f1efb46 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1664.785230] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Waiting for the task: (returnval){ [ 1664.785230] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522cec84-f46c-88bc-e6e1-2ef8f347827d" [ 1664.785230] env[63024]: _type = "Task" [ 1664.785230] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1664.801374] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522cec84-f46c-88bc-e6e1-2ef8f347827d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.011036] env[63024]: DEBUG oslo_vmware.api [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950660, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.012031] env[63024]: DEBUG oslo_vmware.api [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950659, 'name': PowerOffVM_Task, 'duration_secs': 0.33439} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.012031] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1665.013037] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1665.013037] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cf334e5c-7233-4833-b493-d2791909dbf0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.047860] env[63024]: DEBUG oslo_concurrency.lockutils [req-54b005bc-45d0-4d50-86d2-23c6c93d456e req-16830189-559c-43bb-af95-32e4354a3f11 service nova] Releasing lock "refresh_cache-8a826350-0fee-409d-a3fc-260d7d43bdf6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.063548] env[63024]: DEBUG oslo_vmware.api [None req-4d0bca0e-f424-4486-9436-f2c098d93b25 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950661, 'name': SuspendVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.138852] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1665.139862] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1665.139862] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Deleting the datastore file [datastore1] 00e925a1-9b79-46e2-b7f7-c0b63e1e72df {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1665.139862] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4c355434-4691-4039-8079-875ae8cd66b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.151649] env[63024]: DEBUG oslo_vmware.api [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Waiting for the task: (returnval){ [ 1665.151649] env[63024]: value = "task-1950663" [ 1665.151649] env[63024]: _type = "Task" [ 1665.151649] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.160206] env[63024]: DEBUG oslo_vmware.api [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950663, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.212246] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1665.252461] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950657, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616175} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.252817] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 610dd030-5080-498a-8744-b1411297d70d/610dd030-5080-498a-8744-b1411297d70d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1665.253173] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1665.253611] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c482bf3c-2239-4f35-888a-0d1c920ace7e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.262934] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1665.262934] env[63024]: value = "task-1950664" [ 1665.262934] env[63024]: _type = "Task" [ 1665.262934] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.272313] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950664, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.295535] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522cec84-f46c-88bc-e6e1-2ef8f347827d, 'name': SearchDatastore_Task, 'duration_secs': 0.046035} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.295854] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1665.296131] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 8a826350-0fee-409d-a3fc-260d7d43bdf6/8a826350-0fee-409d-a3fc-260d7d43bdf6.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1665.296425] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8d4a6dc-9640-4cb8-ae94-f3d57c3b0465 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.304178] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Waiting for the task: (returnval){ [ 1665.304178] env[63024]: value = "task-1950665" [ 1665.304178] env[63024]: _type = "Task" [ 1665.304178] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.313543] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950665, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.507380] env[63024]: DEBUG oslo_vmware.api [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Task: {'id': task-1950660, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.527591} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.507772] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1665.507932] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1665.508121] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1665.508355] env[63024]: INFO nova.compute.manager [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1665.508669] env[63024]: DEBUG oslo.service.loopingcall [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1665.508877] env[63024]: DEBUG nova.compute.manager [-] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1665.509052] env[63024]: DEBUG nova.network.neutron [-] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1665.563173] env[63024]: DEBUG oslo_vmware.api [None req-4d0bca0e-f424-4486-9436-f2c098d93b25 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950661, 'name': SuspendVM_Task, 'duration_secs': 0.725455} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.563364] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4d0bca0e-f424-4486-9436-f2c098d93b25 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Suspended the VM {{(pid=63024) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1665.563537] env[63024]: DEBUG nova.compute.manager [None req-4d0bca0e-f424-4486-9436-f2c098d93b25 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1665.564387] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7ac178-aca3-4bf7-b81f-835a8957f794 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.671896] env[63024]: DEBUG oslo_vmware.api [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Task: {'id': task-1950663, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.234456} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.671896] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1665.672470] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1665.672470] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1665.672470] env[63024]: INFO nova.compute.manager [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1665.673850] env[63024]: DEBUG oslo.service.loopingcall [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1665.673850] env[63024]: DEBUG nova.compute.manager [-] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1665.673850] env[63024]: DEBUG nova.network.neutron [-] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1665.773695] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950664, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.110675} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1665.774190] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1665.777953] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc38b8f-fbfd-43cf-b466-baf9066df1f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.803764] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 610dd030-5080-498a-8744-b1411297d70d/610dd030-5080-498a-8744-b1411297d70d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1665.806269] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-156440f8-558e-4a22-9e3e-097403676578 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1665.833283] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950665, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.835217] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1665.835217] env[63024]: value = "task-1950667" [ 1665.835217] env[63024]: _type = "Task" [ 1665.835217] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1665.848230] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950667, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1665.968438] env[63024]: DEBUG nova.compute.manager [req-c61ce283-c414-4494-b6da-a0545495f217 req-86772da4-0ec2-4f6b-bc76-958d8b9e5846 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Received event network-vif-deleted-fe1aa30b-c99e-4641-9d91-c99d20670de0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1665.969496] env[63024]: INFO nova.compute.manager [req-c61ce283-c414-4494-b6da-a0545495f217 req-86772da4-0ec2-4f6b-bc76-958d8b9e5846 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Neutron deleted interface fe1aa30b-c99e-4641-9d91-c99d20670de0; detaching it from the instance and deleting it from the info cache [ 1665.969496] env[63024]: DEBUG nova.network.neutron [req-c61ce283-c414-4494-b6da-a0545495f217 req-86772da4-0ec2-4f6b-bc76-958d8b9e5846 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.182287] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99563a2-b1a4-4aed-b4c6-d5f948f8d511 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.190265] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ec39f1-b6a4-494c-886f-77d6dc3fb3cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.224709] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-debdd9f6-3238-40ce-9fa9-95aef19896c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.228695] env[63024]: DEBUG nova.compute.manager [req-65439f26-41be-4a7e-b813-d60e71973d7b req-0bbd973f-0b2f-4724-b082-6eb108ab4e49 service nova] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Received event network-vif-deleted-d38e9043-9c6b-47a5-b6db-5d1ac7035c12 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1666.228901] env[63024]: INFO nova.compute.manager [req-65439f26-41be-4a7e-b813-d60e71973d7b req-0bbd973f-0b2f-4724-b082-6eb108ab4e49 service nova] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Neutron deleted interface d38e9043-9c6b-47a5-b6db-5d1ac7035c12; detaching it from the instance and deleting it from the info cache [ 1666.229121] env[63024]: DEBUG nova.network.neutron [req-65439f26-41be-4a7e-b813-d60e71973d7b req-0bbd973f-0b2f-4724-b082-6eb108ab4e49 service nova] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.237796] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20efde66-9082-4c1b-93bb-a424a2089d84 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.255603] env[63024]: DEBUG nova.compute.provider_tree [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updating inventory in ProviderTree for provider 
89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1666.334109] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950665, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.706094} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.334398] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 8a826350-0fee-409d-a3fc-260d7d43bdf6/8a826350-0fee-409d-a3fc-260d7d43bdf6.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1666.334620] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1666.334873] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-199f39d1-957e-462d-9fc4-35352cfceebe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.347844] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950667, 'name': ReconfigVM_Task, 'duration_secs': 0.371914} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.349051] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 610dd030-5080-498a-8744-b1411297d70d/610dd030-5080-498a-8744-b1411297d70d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1666.349766] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Waiting for the task: (returnval){ [ 1666.349766] env[63024]: value = "task-1950668" [ 1666.349766] env[63024]: _type = "Task" [ 1666.349766] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.349950] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb6b0359-aa75-466f-be7d-4ca4814f37e7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.360854] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950668, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.362255] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1666.362255] env[63024]: value = "task-1950669" [ 1666.362255] env[63024]: _type = "Task" [ 1666.362255] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.372415] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950669, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.436065] env[63024]: DEBUG nova.network.neutron [-] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.471903] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dbe73e2a-819f-45fc-b3c6-ca558940a8e5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.481660] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e83d8a91-ec37-4e60-a0b1-2116ef749ad2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.514540] env[63024]: DEBUG nova.network.neutron [-] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1666.516106] env[63024]: DEBUG nova.compute.manager [req-c61ce283-c414-4494-b6da-a0545495f217 req-86772da4-0ec2-4f6b-bc76-958d8b9e5846 service nova] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Detach interface failed, port_id=fe1aa30b-c99e-4641-9d91-c99d20670de0, reason: Instance b629b4f8-f79f-4361-b78c-8705a6888a9e could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1666.732655] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-982fb7b4-4058-4c0c-8300-12b937ac7a3a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.744008] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d1c666-5e96-4faa-bbac-f862fc163838 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.778413] env[63024]: DEBUG nova.compute.manager [req-65439f26-41be-4a7e-b813-d60e71973d7b req-0bbd973f-0b2f-4724-b082-6eb108ab4e49 service nova] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Detach interface failed, port_id=d38e9043-9c6b-47a5-b6db-5d1ac7035c12, reason: Instance 00e925a1-9b79-46e2-b7f7-c0b63e1e72df could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1666.779599] env[63024]: ERROR nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [req-2bff9292-77d7-4b55-971b-43417d10df33] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2bff9292-77d7-4b55-971b-43417d10df33"}]} [ 1666.798604] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1666.813719] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1666.814014] env[63024]: DEBUG nova.compute.provider_tree [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1666.825859] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1666.845365] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1666.861950] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950668, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144449} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1666.862263] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1666.863100] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca69e0d0-91c8-4d88-b55e-e7428b056bb7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.876822] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950669, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.895286] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] 8a826350-0fee-409d-a3fc-260d7d43bdf6/8a826350-0fee-409d-a3fc-260d7d43bdf6.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1666.898244] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c5869bca-19ca-41ed-af88-c99968fca255 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1666.918857] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Waiting for the task: (returnval){ [ 1666.918857] env[63024]: value = "task-1950670" [ 1666.918857] env[63024]: _type = "Task" [ 1666.918857] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1666.930617] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950670, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1666.940123] env[63024]: INFO nova.compute.manager [-] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Took 1.43 seconds to deallocate network for instance. [ 1667.017408] env[63024]: INFO nova.compute.manager [-] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Took 1.34 seconds to deallocate network for instance. 
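The entries above keep repeating the same wait-for-task pattern: a vSphere task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, DeleteDatastoreFile_Task, SuspendVM_Task, ...) is created, then polled until it reports success or an error, with the progress percentage and eventually the duration_secs logged on each poll. The sketch below is a minimal, self-contained illustration of that polling loop, not oslo.vmware's actual implementation; the TaskInfo type and the fetch_task_info callable are invented stand-ins for the real vSphere API calls.

```python
import time
from dataclasses import dataclass


# Hypothetical stand-in for the task info object a vSphere SDK call would return.
@dataclass
class TaskInfo:
    state: str            # "running", "success" or "error"
    progress: int         # 0-100
    error: str | None = None


def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300.0):
    """Poll a task until it finishes, logging progress like the entries above.

    fetch_task_info is any callable returning a TaskInfo for task_id; in a real
    deployment it would wrap the vSphere API, which is out of scope for this sketch.
    """
    started = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        if info.state == "success":
            print(f"Task {task_id} completed successfully")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        print(f"Task {task_id} progress is {info.progress}%")
        if time.monotonic() - started > timeout:
            raise TimeoutError(f"Task {task_id} did not finish in {timeout}s")
        time.sleep(poll_interval)


if __name__ == "__main__":
    # Simulated task that reaches completion after a few polls.
    states = iter([TaskInfo("running", 0), TaskInfo("running", 51),
                   TaskInfo("running", 99), TaskInfo("success", 100)])
    wait_for_task(lambda _tid: next(states), "task-demo", poll_interval=0.01)
```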
[ 1667.301819] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d7c864-07f5-4812-aaae-19162b1d6632 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.309554] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4c7b973-6112-47dd-9771-fa6c8a9a6029 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.343854] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a256e54-5a0d-4b39-abaf-56d15ff653af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.352182] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99e98274-ca7b-47a7-abc1-8ac48ed32d3e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.366383] env[63024]: DEBUG nova.compute.provider_tree [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1667.376391] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950669, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.429543] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950670, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.449030] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.449648] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.449648] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1667.525205] env[63024]: DEBUG oslo_concurrency.lockutils [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1667.877090] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950669, 'name': Rename_Task, 'duration_secs': 1.14233} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.877395] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1667.877683] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92bc768d-e2ad-46f3-8f36-04e0df164660 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.884424] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1667.884424] env[63024]: value = "task-1950671" [ 1667.884424] env[63024]: _type = "Task" [ 1667.884424] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.892199] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950671, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.893905] env[63024]: ERROR nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [req-294cba14-bc46-42fa-8a0c-4ea186033738] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-294cba14-bc46-42fa-8a0c-4ea186033738"}]} [ 1667.912045] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1667.930541] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950670, 'name': ReconfigVM_Task, 'duration_secs': 0.56204} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1667.931578] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1667.931781] env[63024]: DEBUG nova.compute.provider_tree [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1667.933909] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Reconfigured VM instance instance-0000001f to attach disk [datastore1] 8a826350-0fee-409d-a3fc-260d7d43bdf6/8a826350-0fee-409d-a3fc-260d7d43bdf6.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1667.934903] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7cc949c7-4ed2-4e39-8c1f-54df793d23cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.941717] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Waiting for the task: (returnval){ [ 1667.941717] env[63024]: value = "task-1950672" [ 1667.941717] env[63024]: _type = "Task" [ 1667.941717] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1667.946820] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1667.954054] env[63024]: DEBUG nova.compute.manager [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1667.956822] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950672, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1667.966473] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1668.126842] env[63024]: DEBUG nova.compute.manager [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1668.128804] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aacc1414-2896-4bf9-8b1e-8ec0f89e6aef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.383098] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38c8241b-9db4-47ec-8c20-4d6034068a11 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.396789] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a993f7-64c7-4c62-8364-ac1e14eb5b98 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.400372] env[63024]: DEBUG oslo_vmware.api [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950671, 'name': PowerOnVM_Task, 'duration_secs': 0.493766} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.400678] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1668.400898] env[63024]: INFO nova.compute.manager [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Took 9.90 seconds to spawn the instance on the hypervisor. [ 1668.401118] env[63024]: DEBUG nova.compute.manager [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1668.402239] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da33a02-937d-49e7-bc86-520507aea32f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.431514] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce9053a-3f96-4974-b692-87043688efc0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.447028] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ece201-f262-4df5-afe0-43e8872f4fca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.455878] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950672, 'name': Rename_Task, 'duration_secs': 0.152} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1668.463530] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1668.466313] env[63024]: DEBUG nova.compute.provider_tree [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1668.467570] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9339d729-0b61-4fd9-9184-2f1cccf0ec7a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.478857] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Waiting for the task: (returnval){ [ 1668.478857] env[63024]: value = "task-1950673" [ 1668.478857] env[63024]: _type = "Task" [ 1668.478857] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1668.485125] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1668.489255] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950673, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.643294] env[63024]: INFO nova.compute.manager [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] instance snapshotting [ 1668.643294] env[63024]: WARNING nova.compute.manager [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 1668.644481] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0751195-34b4-4713-b5e4-41938c2d854a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.666622] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f6e617-8b14-409f-bdb6-abfecbfa2514 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1668.946340] env[63024]: INFO nova.compute.manager [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Took 38.22 seconds to build instance. [ 1668.988800] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950673, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1668.994178] env[63024]: ERROR nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [req-1e852960-7432-4763-ab92-3f6170eef749] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1e852960-7432-4763-ab92-3f6170eef749"}]} [ 1669.010177] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1669.024737] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1669.024971] env[63024]: DEBUG nova.compute.provider_tree [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1669.036860] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1669.052915] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1669.178113] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1669.178424] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-91e37490-05fe-496e-8ab2-2b1f880e0f9f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.187848] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1669.187848] env[63024]: value = "task-1950674" [ 1669.187848] env[63024]: _type = "Task" [ 1669.187848] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1669.196216] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950674, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.228038] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Acquiring lock "49eb6292-012a-4296-aff8-9c460866a602" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1669.228135] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Lock "49eb6292-012a-4296-aff8-9c460866a602" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1669.448179] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d17f146-3ec5-48b8-b281-6365392ffe93 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "610dd030-5080-498a-8744-b1411297d70d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.315s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1669.451192] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6cd8926-b63f-4ce8-a7ee-13295f1d859d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.458781] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353ec5df-c628-47d5-beb2-039d9ed4eaae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.493965] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89204854-373b-449f-ba9e-989a7ed5b60f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.504289] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74b50b1e-cf77-441d-98f0-3efb9c0318b9 
{{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1669.508036] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950673, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.518215] env[63024]: DEBUG nova.compute.provider_tree [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1669.697537] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950674, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1669.730218] env[63024]: DEBUG nova.compute.manager [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1669.744966] env[63024]: INFO nova.compute.manager [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Rebuilding instance [ 1669.864548] env[63024]: DEBUG nova.compute.manager [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1669.865585] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ca4f492-0886-48d6-8feb-9313999c2f3e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.000063] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950673, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.051546] env[63024]: DEBUG nova.scheduler.client.report [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 55 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1670.051804] env[63024]: DEBUG nova.compute.provider_tree [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 55 to 56 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1670.051985] env[63024]: DEBUG nova.compute.provider_tree [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1670.198503] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950674, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.249131] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1670.501183] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950673, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.558080] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 5.973s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1670.558664] env[63024]: DEBUG nova.compute.manager [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1670.561584] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.533s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1670.564566] env[63024]: INFO nova.compute.claims [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1670.698952] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950674, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.811312] env[63024]: DEBUG oslo_vmware.rw_handles [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526987df-4e38-9eb4-d7c2-8ab81847f04a/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1670.812402] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7825743-9cb7-450f-b927-37f3453c482f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.819355] env[63024]: DEBUG oslo_vmware.rw_handles [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526987df-4e38-9eb4-d7c2-8ab81847f04a/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1670.819514] env[63024]: ERROR oslo_vmware.rw_handles [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526987df-4e38-9eb4-d7c2-8ab81847f04a/disk-0.vmdk due to incomplete transfer. 
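
The 409 from Placement a few records above (code placement.concurrent_update) is the generation-conflict path: the PUT to .../inventories carried a stale resource_provider_generation, so the report client refreshes the provider's inventories and retries, which is why the provider generation later moves from 55 to 56. A minimal sketch of that optimistic-concurrency retry against the Placement HTTP API follows; it is not the nova report-client code, and the endpoint URL, token, and retry count are assumptions for illustration.

    import requests

    PLACEMENT = "http://placement.example/placement"   # assumed endpoint
    HEADERS = {
        "X-Auth-Token": "<token>",                      # assumed auth token
        "OpenStack-API-Version": "placement 1.26",
    }

    def set_inventories(rp_uuid, inventories, retries=3):
        """PUT inventories with the provider's current generation; retry on 409 conflict."""
        for _ in range(retries):
            # Fetch the provider's current generation (the optimistic-concurrency token).
            rp = requests.get(f"{PLACEMENT}/resource_providers/{rp_uuid}",
                              headers=HEADERS).json()
            body = {
                "resource_provider_generation": rp["generation"],
                "inventories": inventories,
            }
            resp = requests.put(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                                json=body, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the generation;
            # loop to refresh the provider and retry, as the log above does.
        raise RuntimeError("inventory update kept conflicting")
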
[ 1670.819757] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-443dcfa8-6e10-4df6-b8f2-d235b2337889 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.826891] env[63024]: DEBUG oslo_vmware.rw_handles [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/526987df-4e38-9eb4-d7c2-8ab81847f04a/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1670.827149] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Uploaded image 1f6dc89a-45dd-4296-a06c-4feac41636c3 to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1670.829664] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1670.829987] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7ed2771c-59e6-4b54-8e4b-09bb4d65c207 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.837267] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1670.837267] env[63024]: value = "task-1950675" [ 1670.837267] env[63024]: _type = "Task" [ 1670.837267] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.845633] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950675, 'name': Destroy_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1670.879422] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1670.879852] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a0e31e5-9eb0-47ce-87da-8d4135f72810 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1670.887567] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1670.887567] env[63024]: value = "task-1950676" [ 1670.887567] env[63024]: _type = "Task" [ 1670.887567] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1670.897328] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950676, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.001129] env[63024]: DEBUG oslo_vmware.api [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950673, 'name': PowerOnVM_Task, 'duration_secs': 2.254418} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.001442] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1671.001671] env[63024]: INFO nova.compute.manager [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Took 9.90 seconds to spawn the instance on the hypervisor. 
[ 1671.001884] env[63024]: DEBUG nova.compute.manager [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1671.002740] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ab45a8-30de-4fde-a44f-2bb0e9d43852 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.073186] env[63024]: DEBUG nova.compute.utils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1671.074734] env[63024]: DEBUG nova.compute.manager [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1671.074734] env[63024]: DEBUG nova.network.neutron [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1671.153938] env[63024]: DEBUG nova.policy [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17fb87e2577e46858934f157a394a590', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9981ec11228244fd8b75ee951a940c85', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1671.199288] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950674, 'name': CreateSnapshot_Task, 'duration_secs': 1.831213} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.199558] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1671.200352] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6551fa67-a919-45b8-a172-b8b18be54435 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.348577] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950675, 'name': Destroy_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.397278] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950676, 'name': PowerOffVM_Task, 'duration_secs': 0.424265} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.401019] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1671.401019] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1671.401019] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b45d5c5-a8bf-4759-a3ae-f58d3fbded17 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.406411] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1671.406788] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-515267c6-734a-4aad-a72c-de77f8366a90 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.481020] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1671.481020] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None 
req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1671.481020] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleting the datastore file [datastore1] 610dd030-5080-498a-8744-b1411297d70d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1671.481020] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0aae0839-237d-4e15-918f-75bc93c7117d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.488462] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1671.488462] env[63024]: value = "task-1950678" [ 1671.488462] env[63024]: _type = "Task" [ 1671.488462] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.499631] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950678, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.523208] env[63024]: INFO nova.compute.manager [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Took 40.66 seconds to build instance. [ 1671.581500] env[63024]: DEBUG nova.compute.manager [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1671.724818] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1671.725525] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-195b05d6-51cd-4677-a6f2-37a8cfa6df14 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.737486] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1671.737486] env[63024]: value = "task-1950679" [ 1671.737486] env[63024]: _type = "Task" [ 1671.737486] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.741759] env[63024]: DEBUG nova.network.neutron [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Successfully created port: 008138b2-5e34-470d-b8f1-93b1ca8df541 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1671.754007] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950679, 'name': CloneVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.850282] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950675, 'name': Destroy_Task, 'duration_secs': 0.931837} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1671.850771] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Destroyed the VM [ 1671.851144] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1671.851435] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-be458588-3ed7-449c-801a-bfac61c902e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1671.857805] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1671.857805] env[63024]: value = "task-1950680" [ 1671.857805] env[63024]: _type = "Task" [ 1671.857805] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1671.867236] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950680, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1671.998693] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950678, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.290114} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.001239] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1672.001430] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1672.001604] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1672.028588] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec869f54-4a37-440a-a21b-e009f9977630 tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Lock "8a826350-0fee-409d-a3fc-260d7d43bdf6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.536s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1672.107772] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae3bb64-2dd7-4bd0-bcfb-d262b1ee9e01 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.117900] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2707c1d-2123-4e62-9ad9-1590eec4dfef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.148282] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f873f1-57bf-4782-ba0f-a547d26c82d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.155997] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8130931-f9dc-4ae5-ae4d-85ec8f729b72 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.172599] env[63024]: DEBUG nova.compute.provider_tree [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1672.251923] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 
tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950679, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.372826] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950680, 'name': RemoveSnapshot_Task} progress is 12%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.496630] env[63024]: DEBUG nova.compute.manager [req-9f51fd18-522a-44a5-b2bf-18ee46a2bcc2 req-f67f21b0-8485-44ca-b24d-9cc6ab62b97a service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Received event network-changed-36bc0224-4621-4b20-b039-244da00236ac {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1672.496842] env[63024]: DEBUG nova.compute.manager [req-9f51fd18-522a-44a5-b2bf-18ee46a2bcc2 req-f67f21b0-8485-44ca-b24d-9cc6ab62b97a service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Refreshing instance network info cache due to event network-changed-36bc0224-4621-4b20-b039-244da00236ac. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1672.497567] env[63024]: DEBUG oslo_concurrency.lockutils [req-9f51fd18-522a-44a5-b2bf-18ee46a2bcc2 req-f67f21b0-8485-44ca-b24d-9cc6ab62b97a service nova] Acquiring lock "refresh_cache-8a826350-0fee-409d-a3fc-260d7d43bdf6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1672.497795] env[63024]: DEBUG oslo_concurrency.lockutils [req-9f51fd18-522a-44a5-b2bf-18ee46a2bcc2 req-f67f21b0-8485-44ca-b24d-9cc6ab62b97a service nova] Acquired lock "refresh_cache-8a826350-0fee-409d-a3fc-260d7d43bdf6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1672.498044] env[63024]: DEBUG nova.network.neutron [req-9f51fd18-522a-44a5-b2bf-18ee46a2bcc2 req-f67f21b0-8485-44ca-b24d-9cc6ab62b97a service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Refreshing network info cache for port 36bc0224-4621-4b20-b039-244da00236ac {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1672.589316] env[63024]: DEBUG nova.compute.manager [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1672.621365] env[63024]: DEBUG nova.virt.hardware [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T11:07:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='1197457257',id=34,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1342542410',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1672.621602] env[63024]: DEBUG nova.virt.hardware [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1672.621849] env[63024]: DEBUG nova.virt.hardware [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1672.621944] env[63024]: DEBUG nova.virt.hardware [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1672.622119] env[63024]: DEBUG nova.virt.hardware [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1672.622268] env[63024]: DEBUG nova.virt.hardware [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1672.622469] env[63024]: DEBUG nova.virt.hardware [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1672.622627] env[63024]: DEBUG nova.virt.hardware [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 
tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1672.622791] env[63024]: DEBUG nova.virt.hardware [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1672.622951] env[63024]: DEBUG nova.virt.hardware [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1672.623156] env[63024]: DEBUG nova.virt.hardware [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1672.624022] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff64186-9f16-4c89-8e80-d46d37d7a888 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.632088] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb119c90-03bb-431b-89b6-e83423a51b97 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1672.706051] env[63024]: DEBUG nova.scheduler.client.report [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 56 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1672.706339] env[63024]: DEBUG nova.compute.provider_tree [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 56 to 57 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1672.706521] env[63024]: DEBUG nova.compute.provider_tree [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1672.751754] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950679, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1672.868978] env[63024]: DEBUG oslo_vmware.api [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950680, 'name': RemoveSnapshot_Task, 'duration_secs': 0.917336} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1672.869317] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1672.869581] env[63024]: INFO nova.compute.manager [None req-74e00bbc-9ded-41a3-ad3a-f6152c6eedcd tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Took 14.28 seconds to snapshot the instance on the hypervisor. [ 1673.032072] env[63024]: DEBUG nova.virt.hardware [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1673.032308] env[63024]: DEBUG nova.virt.hardware [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1673.032486] env[63024]: DEBUG nova.virt.hardware [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1673.032710] env[63024]: DEBUG nova.virt.hardware [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 
tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1673.032895] env[63024]: DEBUG nova.virt.hardware [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1673.034505] env[63024]: DEBUG nova.virt.hardware [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1673.034505] env[63024]: DEBUG nova.virt.hardware [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1673.034505] env[63024]: DEBUG nova.virt.hardware [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1673.034505] env[63024]: DEBUG nova.virt.hardware [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1673.034505] env[63024]: DEBUG nova.virt.hardware [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1673.034760] env[63024]: DEBUG nova.virt.hardware [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1673.034916] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6797067b-3ab2-4f36-8914-b3a18cd1377b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.045288] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-470466a3-2eec-46cc-bd65-be543767890d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.061245] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:9d:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a7cae93-d7ad-4e7c-b18b-00e7e749299e', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1673.068827] env[63024]: DEBUG oslo.service.loopingcall [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1673.069131] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 610dd030-5080-498a-8744-b1411297d70d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1673.069382] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b5986b2-65bb-477d-868f-93037eee2e4a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.091048] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1673.091048] env[63024]: value = "task-1950681" [ 1673.091048] env[63024]: _type = "Task" [ 1673.091048] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.099081] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950681, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.211762] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1673.212881] env[63024]: DEBUG oslo_concurrency.lockutils [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.364s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.213131] env[63024]: DEBUG nova.objects.instance [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Lazy-loading 'resources' on Instance uuid 17e1dfa2-b104-4aac-928e-6364da155c3d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1673.236660] env[63024]: DEBUG nova.network.neutron [req-9f51fd18-522a-44a5-b2bf-18ee46a2bcc2 req-f67f21b0-8485-44ca-b24d-9cc6ab62b97a service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Updated VIF entry in instance network info cache for port 36bc0224-4621-4b20-b039-244da00236ac. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1673.237069] env[63024]: DEBUG nova.network.neutron [req-9f51fd18-522a-44a5-b2bf-18ee46a2bcc2 req-f67f21b0-8485-44ca-b24d-9cc6ab62b97a service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Updating instance_info_cache with network_info: [{"id": "36bc0224-4621-4b20-b039-244da00236ac", "address": "fa:16:3e:28:b4:46", "network": {"id": "ab62b987-c074-462f-ab8f-d851463f3ed7", "bridge": "br-int", "label": "tempest-ServersTestJSON-1458114320-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "eae1647405bf418ea6abde9723b2c895", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "604c9724-b4ef-4393-a76e-eb4a2b510796", "external-id": "nsx-vlan-transportzone-909", "segmentation_id": 909, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap36bc0224-46", "ovs_interfaceid": "36bc0224-4621-4b20-b039-244da00236ac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1673.253578] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950679, 'name': CloneVM_Task, 'duration_secs': 1.473448} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.253850] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Created linked-clone VM from snapshot [ 1673.254628] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f1ed224-2948-4fbd-9f9f-9375f5e1553c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.262599] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Uploading image bb186550-d504-4b5e-a6f4-986b2f2973a7 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1673.283541] env[63024]: DEBUG oslo_vmware.rw_handles [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1673.283541] env[63024]: value = "vm-402058" [ 1673.283541] env[63024]: _type = "VirtualMachine" [ 1673.283541] env[63024]: }. 
{{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1673.284124] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-50777a91-4da0-45dd-8cb6-03a31dd12437 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.291767] env[63024]: DEBUG oslo_vmware.rw_handles [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lease: (returnval){ [ 1673.291767] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521f22d3-2bfc-3666-e4b8-59c276dc1572" [ 1673.291767] env[63024]: _type = "HttpNfcLease" [ 1673.291767] env[63024]: } obtained for exporting VM: (result){ [ 1673.291767] env[63024]: value = "vm-402058" [ 1673.291767] env[63024]: _type = "VirtualMachine" [ 1673.291767] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1673.292090] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the lease: (returnval){ [ 1673.292090] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521f22d3-2bfc-3666-e4b8-59c276dc1572" [ 1673.292090] env[63024]: _type = "HttpNfcLease" [ 1673.292090] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1673.299091] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1673.299091] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521f22d3-2bfc-3666-e4b8-59c276dc1572" [ 1673.299091] env[63024]: _type = "HttpNfcLease" [ 1673.299091] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1673.600280] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950681, 'name': CreateVM_Task, 'duration_secs': 0.346682} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1673.600565] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 610dd030-5080-498a-8744-b1411297d70d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1673.601174] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1673.601336] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1673.601639] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1673.601886] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8110e142-1fc4-4120-8049-566f52f8ce8d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.606240] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1673.606240] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520c00e2-6925-57fb-e090-fd188062bc32" [ 1673.606240] env[63024]: _type = "Task" [ 1673.606240] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1673.614216] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520c00e2-6925-57fb-e090-fd188062bc32, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1673.716082] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquiring lock "c396e967-e07b-4574-9eea-47182a9f2dc5" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1673.716402] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lock "c396e967-e07b-4574-9eea-47182a9f2dc5" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1673.739250] env[63024]: DEBUG oslo_concurrency.lockutils [req-9f51fd18-522a-44a5-b2bf-18ee46a2bcc2 req-f67f21b0-8485-44ca-b24d-9cc6ab62b97a service nova] Releasing lock "refresh_cache-8a826350-0fee-409d-a3fc-260d7d43bdf6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1673.802718] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1673.802718] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521f22d3-2bfc-3666-e4b8-59c276dc1572" [ 1673.802718] env[63024]: _type = "HttpNfcLease" [ 1673.802718] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1673.803039] env[63024]: DEBUG oslo_vmware.rw_handles [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1673.803039] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521f22d3-2bfc-3666-e4b8-59c276dc1572" [ 1673.803039] env[63024]: _type = "HttpNfcLease" [ 1673.803039] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1673.803773] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32bd171f-5623-4c3e-a8fc-6ac2fca15dff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1673.812581] env[63024]: DEBUG nova.network.neutron [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Successfully updated port: 008138b2-5e34-470d-b8f1-93b1ca8df541 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1673.818034] env[63024]: DEBUG oslo_vmware.rw_handles [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52897bf6-a52c-0924-0623-0550e2456f96/disk-0.vmdk from lease info. 
{{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1673.818383] env[63024]: DEBUG oslo_vmware.rw_handles [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52897bf6-a52c-0924-0623-0550e2456f96/disk-0.vmdk for reading. {{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1674.001718] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5eee6930-d45b-4ca3-82d5-3f97c1bd0fdb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.118417] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520c00e2-6925-57fb-e090-fd188062bc32, 'name': SearchDatastore_Task, 'duration_secs': 0.011281} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.119879] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1674.120165] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1674.120402] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.120547] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.120722] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1674.121096] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a19fb7e-a973-4a0c-b26f-b961a1e1ecae {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.131042] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1674.132009] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1674.132849] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fea7b6c4-2c7d-4306-b170-b61a52086c14 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.143844] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1674.143844] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d5f9f2-83f1-ea0e-afa3-0e5221f8a660" [ 1674.143844] env[63024]: _type = "Task" [ 1674.143844] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.152695] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d5f9f2-83f1-ea0e-afa3-0e5221f8a660, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.212408] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af13dff-64e9-443a-a834-c3515ed9a8c8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.221431] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bffe33d4-c484-4861-84e4-19dec7e104e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.225510] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lock "c396e967-e07b-4574-9eea-47182a9f2dc5" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.509s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.226075] env[63024]: DEBUG nova.compute.manager [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1674.266134] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6b8a45-6a36-4173-8058-6e04b5ff199b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.279468] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f5f476-89ee-42c1-b741-e4d0dc99547b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.294898] env[63024]: DEBUG nova.compute.provider_tree [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1674.323726] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "refresh_cache-18444b47-476a-4ca3-9a4f-0dc58e652143" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.323726] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquired lock "refresh_cache-18444b47-476a-4ca3-9a4f-0dc58e652143" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1674.323726] env[63024]: DEBUG nova.network.neutron [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1674.556696] env[63024]: DEBUG nova.compute.manager [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Received event network-vif-plugged-008138b2-5e34-470d-b8f1-93b1ca8df541 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1674.557067] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] Acquiring lock "18444b47-476a-4ca3-9a4f-0dc58e652143-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1674.557344] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] Lock "18444b47-476a-4ca3-9a4f-0dc58e652143-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1674.557537] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] Lock 
"18444b47-476a-4ca3-9a4f-0dc58e652143-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1674.557711] env[63024]: DEBUG nova.compute.manager [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] No waiting events found dispatching network-vif-plugged-008138b2-5e34-470d-b8f1-93b1ca8df541 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1674.557919] env[63024]: WARNING nova.compute.manager [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Received unexpected event network-vif-plugged-008138b2-5e34-470d-b8f1-93b1ca8df541 for instance with vm_state building and task_state spawning. [ 1674.558097] env[63024]: DEBUG nova.compute.manager [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Received event network-changed-008138b2-5e34-470d-b8f1-93b1ca8df541 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1674.558347] env[63024]: DEBUG nova.compute.manager [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Refreshing instance network info cache due to event network-changed-008138b2-5e34-470d-b8f1-93b1ca8df541. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1674.558540] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] Acquiring lock "refresh_cache-18444b47-476a-4ca3-9a4f-0dc58e652143" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1674.657537] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d5f9f2-83f1-ea0e-afa3-0e5221f8a660, 'name': SearchDatastore_Task, 'duration_secs': 0.009789} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1674.658601] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53ae4e98-b2a7-4ad0-9392-b675ebd3ea11 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1674.664882] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1674.664882] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e9d648-cc96-f9d7-e1b1-7fd62f8a3029" [ 1674.664882] env[63024]: _type = "Task" [ 1674.664882] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1674.674971] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e9d648-cc96-f9d7-e1b1-7fd62f8a3029, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1674.731880] env[63024]: DEBUG nova.compute.utils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1674.733575] env[63024]: DEBUG nova.compute.manager [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1674.735528] env[63024]: DEBUG nova.network.neutron [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1674.774396] env[63024]: DEBUG nova.policy [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ccf6ca0439af4c608a2e0451e3c59dba', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9dce1df3c8774a7cb1429b5d3f5c046f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1674.798021] env[63024]: DEBUG nova.scheduler.client.report [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1674.861580] env[63024]: DEBUG nova.network.neutron [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1675.005643] env[63024]: DEBUG nova.network.neutron [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Updating instance_info_cache with network_info: [{"id": "008138b2-5e34-470d-b8f1-93b1ca8df541", "address": "fa:16:3e:89:7f:c6", "network": {"id": "1c8709f7-097f-4437-bae1-c45d9a4d02f1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1121041191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9981ec11228244fd8b75ee951a940c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap008138b2-5e", "ovs_interfaceid": "008138b2-5e34-470d-b8f1-93b1ca8df541", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1675.076936] env[63024]: DEBUG nova.network.neutron [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Successfully created port: 77a205c3-534e-4d19-9df6-e9009252110a {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1675.175543] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e9d648-cc96-f9d7-e1b1-7fd62f8a3029, 'name': SearchDatastore_Task, 'duration_secs': 0.010996} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1675.175543] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.175740] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 610dd030-5080-498a-8744-b1411297d70d/610dd030-5080-498a-8744-b1411297d70d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1675.175915] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9128f2a-417a-48a2-9df2-3d90ee7ec50c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.183768] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1675.183768] env[63024]: value = "task-1950683" [ 1675.183768] env[63024]: _type = "Task" [ 1675.183768] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.192083] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950683, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.238112] env[63024]: DEBUG nova.compute.manager [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1675.281690] env[63024]: DEBUG nova.compute.manager [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1675.282847] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df6f214-9d4e-4ab2-98a2-a32e2eda3a76 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.303726] env[63024]: DEBUG oslo_concurrency.lockutils [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.091s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1675.306204] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.897s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1675.306456] env[63024]: DEBUG nova.objects.instance [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lazy-loading 'resources' on Instance uuid 726d9639-1ab4-46a9-975e-5580c8344a37 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1675.331168] env[63024]: INFO nova.scheduler.client.report [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Deleted allocations for instance 17e1dfa2-b104-4aac-928e-6364da155c3d [ 1675.508896] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Releasing lock "refresh_cache-18444b47-476a-4ca3-9a4f-0dc58e652143" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1675.509444] env[63024]: DEBUG nova.compute.manager [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Instance network_info: |[{"id": "008138b2-5e34-470d-b8f1-93b1ca8df541", "address": "fa:16:3e:89:7f:c6", "network": {"id": "1c8709f7-097f-4437-bae1-c45d9a4d02f1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1121041191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9981ec11228244fd8b75ee951a940c85", "mtu": 8950, "physical_network": "default", 
"tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap008138b2-5e", "ovs_interfaceid": "008138b2-5e34-470d-b8f1-93b1ca8df541", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1675.509933] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] Acquired lock "refresh_cache-18444b47-476a-4ca3-9a4f-0dc58e652143" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1675.510298] env[63024]: DEBUG nova.network.neutron [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Refreshing network info cache for port 008138b2-5e34-470d-b8f1-93b1ca8df541 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1675.511768] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:7f:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '008138b2-5e34-470d-b8f1-93b1ca8df541', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1675.522297] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Creating folder: Project (9981ec11228244fd8b75ee951a940c85). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1675.523964] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-26a6b874-d3f3-4052-8eca-ef9770e838c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.536727] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Created folder: Project (9981ec11228244fd8b75ee951a940c85) in parent group-v401959. [ 1675.537035] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Creating folder: Instances. Parent ref: group-v402060. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1675.537393] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-319ce503-cea8-4030-93d7-08dd6d584879 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.548006] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Created folder: Instances in parent group-v402060. [ 1675.548344] env[63024]: DEBUG oslo.service.loopingcall [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1675.548552] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1675.548821] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c366e563-ab65-4277-b025-f735719635ee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.571818] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1675.571818] env[63024]: value = "task-1950686" [ 1675.571818] env[63024]: _type = "Task" [ 1675.571818] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1675.581237] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950686, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.695324] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950683, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1675.795195] env[63024]: INFO nova.compute.manager [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] instance snapshotting [ 1675.798385] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-781df767-5138-4909-b35d-efddf4597cec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.824279] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ea365b0-1e6c-46b9-ab4e-849c0e445da3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1675.840119] env[63024]: DEBUG oslo_concurrency.lockutils [None req-28055a88-5b7f-4a76-af3b-355523600d08 tempest-ServerMetadataTestJSON-1461891895 tempest-ServerMetadataTestJSON-1461891895-project-member] Lock "17e1dfa2-b104-4aac-928e-6364da155c3d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.749s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.087042] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950686, 'name': CreateVM_Task, 'duration_secs': 0.429124} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.087042] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1676.087829] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.088014] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.088335] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1676.088588] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c638b52-add7-4a06-b11d-2758e05b03fb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.095908] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c 
tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1676.095908] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e60bf7-c950-abf6-d19a-c3b7bb873bad" [ 1676.095908] env[63024]: _type = "Task" [ 1676.095908] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.103946] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e60bf7-c950-abf6-d19a-c3b7bb873bad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.197086] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.525349} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.197303] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 610dd030-5080-498a-8744-b1411297d70d/610dd030-5080-498a-8744-b1411297d70d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1676.197506] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1676.197759] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-35eaa878-28b3-4bff-8707-6a2918034d48 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.204919] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1676.204919] env[63024]: value = "task-1950687" [ 1676.204919] env[63024]: _type = "Task" [ 1676.204919] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.215049] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950687, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.248606] env[63024]: DEBUG nova.compute.manager [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1676.251466] env[63024]: DEBUG nova.network.neutron [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Updated VIF entry in instance network info cache for port 008138b2-5e34-470d-b8f1-93b1ca8df541. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1676.251880] env[63024]: DEBUG nova.network.neutron [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Updating instance_info_cache with network_info: [{"id": "008138b2-5e34-470d-b8f1-93b1ca8df541", "address": "fa:16:3e:89:7f:c6", "network": {"id": "1c8709f7-097f-4437-bae1-c45d9a4d02f1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1121041191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9981ec11228244fd8b75ee951a940c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap008138b2-5e", "ovs_interfaceid": "008138b2-5e34-470d-b8f1-93b1ca8df541", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1676.275363] env[63024]: DEBUG nova.virt.hardware [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1676.276208] env[63024]: DEBUG nova.virt.hardware [None 
req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1676.276208] env[63024]: DEBUG nova.virt.hardware [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1676.276208] env[63024]: DEBUG nova.virt.hardware [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1676.276208] env[63024]: DEBUG nova.virt.hardware [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1676.276208] env[63024]: DEBUG nova.virt.hardware [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1676.276375] env[63024]: DEBUG nova.virt.hardware [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1676.276429] env[63024]: DEBUG nova.virt.hardware [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1676.276585] env[63024]: DEBUG nova.virt.hardware [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1676.276738] env[63024]: DEBUG nova.virt.hardware [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1676.276908] env[63024]: DEBUG nova.virt.hardware [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1676.278064] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-518dbd36-507e-4075-8bba-78c492cc79d4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.288542] env[63024]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e490f818-ded4-414a-b5d6-19846d38eb48 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.336265] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1676.336265] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-d5081120-32d4-4315-b7ac-2dd69d1eb320 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.345525] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1676.345525] env[63024]: value = "task-1950688" [ 1676.345525] env[63024]: _type = "Task" [ 1676.345525] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.351394] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12dc88e0-7e4a-4725-8538-68696cc6d826 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.357693] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950688, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.364118] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8984670-0974-4c50-b128-626748b002f6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.405963] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3b3126-7789-4bb9-ad87-e6b9d942d3d2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.414631] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e0fd32-99ff-4cb0-9834-d702581216dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.429062] env[63024]: DEBUG nova.compute.provider_tree [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1676.606581] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e60bf7-c950-abf6-d19a-c3b7bb873bad, 'name': SearchDatastore_Task, 'duration_secs': 0.010126} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.606900] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.607146] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1676.607385] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1676.607527] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1676.607700] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1676.607994] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88606718-4db9-495e-98f6-2c78113a9982 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.616887] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1676.617120] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1676.617887] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6838175-5643-4f4f-8f65-03ae08f38766 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.623638] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1676.623638] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5255e449-bb4a-a83d-be4b-226ba1f98201" [ 1676.623638] env[63024]: _type = "Task" [ 1676.623638] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.631492] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5255e449-bb4a-a83d-be4b-226ba1f98201, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.714394] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950687, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073846} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.714921] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1676.715784] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a836793f-defd-4e76-9f6b-44c9cd6d3416 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.737894] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 610dd030-5080-498a-8744-b1411297d70d/610dd030-5080-498a-8744-b1411297d70d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1676.738278] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f7a6802-9ee3-45fc-b806-8c7c516dfb82 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.755519] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f1094cb-5c79-4ff4-b256-99dd08ef24c8 req-0846884f-7583-44cc-ac29-c5659807d4ac service nova] Releasing lock "refresh_cache-18444b47-476a-4ca3-9a4f-0dc58e652143" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1676.759932] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1676.759932] env[63024]: value = "task-1950689" [ 1676.759932] env[63024]: _type = "Task" [ 1676.759932] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1676.772659] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950689, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1676.857203] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950688, 'name': CreateSnapshot_Task, 'duration_secs': 0.487612} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1676.857501] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1676.858503] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5caa67-6cc9-4f35-8a35-f1b237623547 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1676.933915] env[63024]: DEBUG nova.scheduler.client.report [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1676.939770] env[63024]: DEBUG nova.compute.manager [req-3e9ea1f7-9713-49cc-bda5-5d00d7b72078 req-95b76a5a-59bc-4078-8f3c-2886ffcf80f6 service nova] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Received event network-vif-plugged-77a205c3-534e-4d19-9df6-e9009252110a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1676.940083] env[63024]: DEBUG oslo_concurrency.lockutils [req-3e9ea1f7-9713-49cc-bda5-5d00d7b72078 req-95b76a5a-59bc-4078-8f3c-2886ffcf80f6 service nova] Acquiring lock "94d9210e-ca8d-4ef1-a640-2d9a11ad87d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1676.940310] env[63024]: DEBUG oslo_concurrency.lockutils [req-3e9ea1f7-9713-49cc-bda5-5d00d7b72078 req-95b76a5a-59bc-4078-8f3c-2886ffcf80f6 service nova] Lock "94d9210e-ca8d-4ef1-a640-2d9a11ad87d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1676.940479] env[63024]: DEBUG oslo_concurrency.lockutils [req-3e9ea1f7-9713-49cc-bda5-5d00d7b72078 req-95b76a5a-59bc-4078-8f3c-2886ffcf80f6 service nova] Lock "94d9210e-ca8d-4ef1-a640-2d9a11ad87d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1676.944018] env[63024]: DEBUG nova.compute.manager [req-3e9ea1f7-9713-49cc-bda5-5d00d7b72078 req-95b76a5a-59bc-4078-8f3c-2886ffcf80f6 service nova] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] No waiting events found dispatching network-vif-plugged-77a205c3-534e-4d19-9df6-e9009252110a {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1676.944018] env[63024]: WARNING nova.compute.manager [req-3e9ea1f7-9713-49cc-bda5-5d00d7b72078 
req-95b76a5a-59bc-4078-8f3c-2886ffcf80f6 service nova] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Received unexpected event network-vif-plugged-77a205c3-534e-4d19-9df6-e9009252110a for instance with vm_state building and task_state spawning. [ 1677.134981] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5255e449-bb4a-a83d-be4b-226ba1f98201, 'name': SearchDatastore_Task, 'duration_secs': 0.011857} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.135996] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-122951b8-565f-4b49-9da0-d3ca3ae3d4ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.141543] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1677.141543] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b78eb0-f376-9b59-aa19-37a678e02e99" [ 1677.141543] env[63024]: _type = "Task" [ 1677.141543] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.149420] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b78eb0-f376-9b59-aa19-37a678e02e99, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.206090] env[63024]: DEBUG nova.network.neutron [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Successfully updated port: 77a205c3-534e-4d19-9df6-e9009252110a {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1677.273945] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950689, 'name': ReconfigVM_Task, 'duration_secs': 0.291331} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.274518] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 610dd030-5080-498a-8744-b1411297d70d/610dd030-5080-498a-8744-b1411297d70d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1677.275481] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eb54391c-05f3-4e7f-b13d-c63b9c8801fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.283619] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1677.283619] env[63024]: value = "task-1950690" [ 1677.283619] env[63024]: _type = "Task" [ 1677.283619] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.293615] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950690, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.377417] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1677.378910] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-33662841-9b35-4cf0-a3eb-7c37a5ca23ff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.386946] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1677.386946] env[63024]: value = "task-1950691" [ 1677.386946] env[63024]: _type = "Task" [ 1677.386946] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.396323] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950691, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.440064] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.134s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1677.442749] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.775s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1677.443058] env[63024]: DEBUG nova.objects.instance [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Lazy-loading 'resources' on Instance uuid e3c9e9de-586d-4baa-b4bb-95c41d527a03 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1677.473525] env[63024]: INFO nova.scheduler.client.report [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Deleted allocations for instance 726d9639-1ab4-46a9-975e-5580c8344a37 [ 1677.653329] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b78eb0-f376-9b59-aa19-37a678e02e99, 'name': SearchDatastore_Task, 'duration_secs': 0.013486} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.653608] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1677.653871] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 18444b47-476a-4ca3-9a4f-0dc58e652143/18444b47-476a-4ca3-9a4f-0dc58e652143.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1677.654152] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8298210a-62e6-4d2a-a429-ad6cbbf39519 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.660973] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1677.660973] env[63024]: value = "task-1950692" [ 1677.660973] env[63024]: _type = "Task" [ 1677.660973] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.670585] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950692, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.712202] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquiring lock "refresh_cache-94d9210e-ca8d-4ef1-a640-2d9a11ad87d3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1677.712202] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquired lock "refresh_cache-94d9210e-ca8d-4ef1-a640-2d9a11ad87d3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1677.712202] env[63024]: DEBUG nova.network.neutron [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1677.796217] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950690, 'name': Rename_Task, 'duration_secs': 0.421109} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1677.796587] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1677.796832] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b81d120-6ef7-44d8-9776-ed2f80aac90d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1677.804303] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1677.804303] env[63024]: value = "task-1950693" [ 1677.804303] env[63024]: _type = "Task" [ 1677.804303] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1677.818059] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950693, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.898888] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950691, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1677.986981] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7b8e5583-2124-4efb-b280-1b22454be363 tempest-ServersAdmin275Test-201909302 tempest-ServersAdmin275Test-201909302-project-member] Lock "726d9639-1ab4-46a9-975e-5580c8344a37" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.413s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1678.174566] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950692, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.303900] env[63024]: DEBUG nova.network.neutron [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1678.319955] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950693, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.410604] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950691, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.468037] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df831778-c1b9-4f58-9582-0794487a7a60 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.475169] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375348eb-1cc1-4720-8b9a-b00c2c8499c5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.506630] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e3c3b91-a113-4521-92f0-00e2f7dd7397 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.517855] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c16a5256-be3d-4c21-b76d-418009d62ecd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.533242] env[63024]: DEBUG nova.compute.provider_tree [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1678.550420] env[63024]: DEBUG nova.network.neutron [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Updating instance_info_cache with network_info: [{"id": "77a205c3-534e-4d19-9df6-e9009252110a", "address": "fa:16:3e:08:3f:40", "network": {"id": "3646ed27-80e1-4f7d-a885-5755241c9d98", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-757896701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dce1df3c8774a7cb1429b5d3f5c046f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77a205c3-53", "ovs_interfaceid": "77a205c3-534e-4d19-9df6-e9009252110a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1678.675872] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950692, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.796646} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1678.676188] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 18444b47-476a-4ca3-9a4f-0dc58e652143/18444b47-476a-4ca3-9a4f-0dc58e652143.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1678.676338] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1678.676700] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d7d79bc7-37a3-4e5c-9ebc-a642a0689a9c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1678.685297] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1678.685297] env[63024]: value = "task-1950694" [ 1678.685297] env[63024]: _type = "Task" [ 1678.685297] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1678.693607] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950694, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.816212] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950693, 'name': PowerOnVM_Task} progress is 1%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1678.901091] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950691, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.012474] env[63024]: DEBUG nova.compute.manager [req-4a541910-a759-4b67-bfca-daeb873b9e09 req-a044e689-c181-4796-b7d4-adb58d0c91d9 service nova] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Received event network-changed-77a205c3-534e-4d19-9df6-e9009252110a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1679.012680] env[63024]: DEBUG nova.compute.manager [req-4a541910-a759-4b67-bfca-daeb873b9e09 req-a044e689-c181-4796-b7d4-adb58d0c91d9 service nova] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Refreshing instance network info cache due to event network-changed-77a205c3-534e-4d19-9df6-e9009252110a. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1679.012871] env[63024]: DEBUG oslo_concurrency.lockutils [req-4a541910-a759-4b67-bfca-daeb873b9e09 req-a044e689-c181-4796-b7d4-adb58d0c91d9 service nova] Acquiring lock "refresh_cache-94d9210e-ca8d-4ef1-a640-2d9a11ad87d3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.036465] env[63024]: DEBUG nova.scheduler.client.report [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1679.055244] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Releasing lock "refresh_cache-94d9210e-ca8d-4ef1-a640-2d9a11ad87d3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.055244] env[63024]: DEBUG nova.compute.manager [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Instance network_info: |[{"id": "77a205c3-534e-4d19-9df6-e9009252110a", "address": "fa:16:3e:08:3f:40", "network": {"id": "3646ed27-80e1-4f7d-a885-5755241c9d98", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-757896701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dce1df3c8774a7cb1429b5d3f5c046f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77a205c3-53", "ovs_interfaceid": 
"77a205c3-534e-4d19-9df6-e9009252110a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1679.055439] env[63024]: DEBUG oslo_concurrency.lockutils [req-4a541910-a759-4b67-bfca-daeb873b9e09 req-a044e689-c181-4796-b7d4-adb58d0c91d9 service nova] Acquired lock "refresh_cache-94d9210e-ca8d-4ef1-a640-2d9a11ad87d3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.055439] env[63024]: DEBUG nova.network.neutron [req-4a541910-a759-4b67-bfca-daeb873b9e09 req-a044e689-c181-4796-b7d4-adb58d0c91d9 service nova] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Refreshing network info cache for port 77a205c3-534e-4d19-9df6-e9009252110a {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1679.056442] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:3f:40', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ded8bac-871f-491b-94ec-cb67c08bc828', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '77a205c3-534e-4d19-9df6-e9009252110a', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1679.065018] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Creating folder: Project (9dce1df3c8774a7cb1429b5d3f5c046f). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1679.066109] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-52f85144-6878-4867-b68c-af73318ba3b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.078203] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Created folder: Project (9dce1df3c8774a7cb1429b5d3f5c046f) in parent group-v401959. [ 1679.078583] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Creating folder: Instances. Parent ref: group-v402065. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1679.079125] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86be2c80-e76f-4d7d-a903-b03dbfe02f54 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.089755] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Created folder: Instances in parent group-v402065. 
[ 1679.090052] env[63024]: DEBUG oslo.service.loopingcall [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1679.090251] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1679.090621] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b87a6350-f5d4-4fb1-8092-d4954ddbc1bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.111259] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1679.111259] env[63024]: value = "task-1950697" [ 1679.111259] env[63024]: _type = "Task" [ 1679.111259] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.118973] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950697, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.195981] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950694, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086144} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.196421] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1679.197702] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec502263-5021-451f-9c9e-ba99c584e9d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.242229] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Reconfiguring VM instance instance-00000021 to attach disk [datastore1] 18444b47-476a-4ca3-9a4f-0dc58e652143/18444b47-476a-4ca3-9a4f-0dc58e652143.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1679.243295] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4b42e0f-2c41-46d5-a0fe-d26c735f417a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.270454] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 
1679.270454] env[63024]: value = "task-1950698" [ 1679.270454] env[63024]: _type = "Task" [ 1679.270454] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.282603] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950698, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.318814] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950693, 'name': PowerOnVM_Task} progress is 86%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.401468] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950691, 'name': CloneVM_Task, 'duration_secs': 1.880634} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.401786] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Created linked-clone VM from snapshot [ 1679.402695] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27baff9-255d-4af2-94a6-8f9cb8ba3592 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.413961] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Uploading image e640f6db-ba55-4e75-9342-de97c265fd58 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1679.446811] env[63024]: DEBUG oslo_vmware.rw_handles [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1679.446811] env[63024]: value = "vm-402064" [ 1679.446811] env[63024]: _type = "VirtualMachine" [ 1679.446811] env[63024]: }. 
{{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1679.447174] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-d020b0ac-dbea-424c-a3c5-a98238926c81 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.455340] env[63024]: DEBUG oslo_vmware.rw_handles [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lease: (returnval){ [ 1679.455340] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c49c5c-b132-21f4-bc04-06ce3a2573cd" [ 1679.455340] env[63024]: _type = "HttpNfcLease" [ 1679.455340] env[63024]: } obtained for exporting VM: (result){ [ 1679.455340] env[63024]: value = "vm-402064" [ 1679.455340] env[63024]: _type = "VirtualMachine" [ 1679.455340] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1679.455584] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the lease: (returnval){ [ 1679.455584] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c49c5c-b132-21f4-bc04-06ce3a2573cd" [ 1679.455584] env[63024]: _type = "HttpNfcLease" [ 1679.455584] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1679.464296] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1679.464296] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c49c5c-b132-21f4-bc04-06ce3a2573cd" [ 1679.464296] env[63024]: _type = "HttpNfcLease" [ 1679.464296] env[63024]: } is initializing. 
{{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1679.542675] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.100s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1679.545592] env[63024]: DEBUG oslo_concurrency.lockutils [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.217s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.545878] env[63024]: DEBUG nova.objects.instance [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lazy-loading 'resources' on Instance uuid 22ef5bae-f7bc-43c7-9d77-1b4547e83b24 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1679.577118] env[63024]: INFO nova.scheduler.client.report [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Deleted allocations for instance e3c9e9de-586d-4baa-b4bb-95c41d527a03 [ 1679.624618] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950697, 'name': CreateVM_Task, 'duration_secs': 0.361977} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.624792] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1679.626462] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.626462] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.626756] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1679.626968] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3d4442e-3000-465e-9c01-da2ea59f6438 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.632799] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Waiting for the task: (returnval){ [ 1679.632799] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522fe9bc-0c23-7c2e-9ac6-2c67aef9d2ef" [ 1679.632799] env[63024]: _type = "Task" [ 1679.632799] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.648815] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522fe9bc-0c23-7c2e-9ac6-2c67aef9d2ef, 'name': SearchDatastore_Task, 'duration_secs': 0.010055} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.648815] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1679.648815] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1679.648815] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1679.649180] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1679.649180] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1679.649180] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c8432f8-70cb-41ac-9da9-312ec5c5dcc2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.660413] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] 
Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1679.660413] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1679.660413] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-449b1833-cc57-4ad7-b35b-410b357f4875 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.665457] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Waiting for the task: (returnval){ [ 1679.665457] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526ec9bb-3804-7c1e-99d5-d604696bf28b" [ 1679.665457] env[63024]: _type = "Task" [ 1679.665457] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.674642] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526ec9bb-3804-7c1e-99d5-d604696bf28b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.689583] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Acquiring lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1679.690190] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1679.784023] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950698, 'name': ReconfigVM_Task, 'duration_secs': 0.334985} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.784322] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Reconfigured VM instance instance-00000021 to attach disk [datastore1] 18444b47-476a-4ca3-9a4f-0dc58e652143/18444b47-476a-4ca3-9a4f-0dc58e652143.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1679.784945] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b494063d-c28f-4d29-a218-cd81d3009255 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.791656] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1679.791656] env[63024]: value = "task-1950700" [ 1679.791656] env[63024]: _type = "Task" [ 1679.791656] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1679.800239] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950700, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1679.816380] env[63024]: DEBUG oslo_vmware.api [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950693, 'name': PowerOnVM_Task, 'duration_secs': 1.66228} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1679.816771] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1679.817026] env[63024]: DEBUG nova.compute.manager [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1679.817868] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c77ac28-9014-46ba-96a5-5d23ca4c861a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.964909] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1679.964909] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c49c5c-b132-21f4-bc04-06ce3a2573cd" [ 1679.964909] env[63024]: _type = "HttpNfcLease" [ 1679.964909] env[63024]: } is ready. 
{{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1679.965396] env[63024]: DEBUG oslo_vmware.rw_handles [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1679.965396] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c49c5c-b132-21f4-bc04-06ce3a2573cd" [ 1679.965396] env[63024]: _type = "HttpNfcLease" [ 1679.965396] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1679.965932] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4750fcd-2ba5-461e-9817-1b3d65df7da1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1679.974546] env[63024]: DEBUG oslo_vmware.rw_handles [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522fdc99-229a-950a-fdab-cd9eabca4680/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1679.974701] env[63024]: DEBUG oslo_vmware.rw_handles [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522fdc99-229a-950a-fdab-cd9eabca4680/disk-0.vmdk for reading. {{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1680.058795] env[63024]: DEBUG nova.network.neutron [req-4a541910-a759-4b67-bfca-daeb873b9e09 req-a044e689-c181-4796-b7d4-adb58d0c91d9 service nova] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Updated VIF entry in instance network info cache for port 77a205c3-534e-4d19-9df6-e9009252110a. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1680.059050] env[63024]: DEBUG nova.network.neutron [req-4a541910-a759-4b67-bfca-daeb873b9e09 req-a044e689-c181-4796-b7d4-adb58d0c91d9 service nova] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Updating instance_info_cache with network_info: [{"id": "77a205c3-534e-4d19-9df6-e9009252110a", "address": "fa:16:3e:08:3f:40", "network": {"id": "3646ed27-80e1-4f7d-a885-5755241c9d98", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-757896701-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9dce1df3c8774a7cb1429b5d3f5c046f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ded8bac-871f-491b-94ec-cb67c08bc828", "external-id": "nsx-vlan-transportzone-212", "segmentation_id": 212, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap77a205c3-53", "ovs_interfaceid": "77a205c3-534e-4d19-9df6-e9009252110a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1680.087012] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c0df2455-e62c-49b6-950d-7e7d146e4c7c tempest-ServerExternalEventsTest-648890381 tempest-ServerExternalEventsTest-648890381-project-member] Lock "e3c9e9de-586d-4baa-b4bb-95c41d527a03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.487s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.112598] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-1a6b45eb-5802-4a9d-be21-52101d2644c6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.178526] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526ec9bb-3804-7c1e-99d5-d604696bf28b, 'name': SearchDatastore_Task, 'duration_secs': 0.00945} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.183709] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d2f4153e-3ad2-4a7b-9c6c-e0bb015d0b9c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.190105] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Waiting for the task: (returnval){ [ 1680.190105] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ffa96b-ad9f-4d01-c24c-ad884ea582d1" [ 1680.190105] env[63024]: _type = "Task" [ 1680.190105] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.198322] env[63024]: DEBUG nova.compute.utils [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1680.209127] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ffa96b-ad9f-4d01-c24c-ad884ea582d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.305428] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950700, 'name': Rename_Task, 'duration_secs': 0.150028} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.305749] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1680.306017] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5e5efd5-0e65-4e6e-8831-d99974254ffb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.314634] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1680.314634] env[63024]: value = "task-1950701" [ 1680.314634] env[63024]: _type = "Task" [ 1680.314634] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.323162] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950701, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.344263] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1680.531461] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74fdbe79-e9e3-4f2e-b362-6c26989c7147 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.541905] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d645fcb0-23d8-4635-b292-e0aab98b7fd6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.579694] env[63024]: DEBUG oslo_concurrency.lockutils [req-4a541910-a759-4b67-bfca-daeb873b9e09 req-a044e689-c181-4796-b7d4-adb58d0c91d9 service nova] Releasing lock "refresh_cache-94d9210e-ca8d-4ef1-a640-2d9a11ad87d3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.580979] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29441e0-fd65-4444-b3be-a4a6a02a11de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.590815] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f58a8ca-5f8c-4197-b68e-fc221399544c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.610511] env[63024]: DEBUG nova.compute.provider_tree [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1680.704940] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ffa96b-ad9f-4d01-c24c-ad884ea582d1, 'name': SearchDatastore_Task, 'duration_secs': 0.026655} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1680.704940] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1680.704940] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1680.704940] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3/94d9210e-ca8d-4ef1-a640-2d9a11ad87d3.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1680.705291] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a63fe31f-4d1a-4cf4-8d6a-4e391bebb0a8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1680.713765] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Waiting for the task: (returnval){ [ 1680.713765] env[63024]: value = "task-1950702" [ 1680.713765] env[63024]: _type = "Task" [ 1680.713765] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1680.723446] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950702, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1680.829726] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950701, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.116495] env[63024]: DEBUG nova.scheduler.client.report [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1681.227180] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950702, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.331532] env[63024]: DEBUG oslo_vmware.api [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950701, 'name': PowerOnVM_Task, 'duration_secs': 0.77614} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.331532] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1681.331975] env[63024]: INFO nova.compute.manager [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Took 8.74 seconds to spawn the instance on the hypervisor. 
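The "Updating instance_info_cache with network_info" entry a few lines above (port 77a205c3-534e-4d19-9df6-e9009252110a) carries the full VIF model that nova caches for instance 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3. A minimal sketch of walking that structure as plain Python data follows; the dict is a hand-trimmed copy of the logged entry, kept only to show where the device name, MAC address and fixed IP live, and is not part of the log itself.

# Trimmed copy of the VIF entry from the instance_info_cache update above;
# only the fields needed to show the shape of the structure are kept.
vif = {
    "id": "77a205c3-534e-4d19-9df6-e9009252110a",
    "address": "fa:16:3e:08:3f:40",                 # MAC of the Neutron port
    "type": "ovs",
    "devname": "tap77a205c3-53",
    "ovs_interfaceid": "77a205c3-534e-4d19-9df6-e9009252110a",
    "network": {
        "id": "3646ed27-80e1-4f7d-a885-5755241c9d98",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "version": 4},
            "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4}],
        }],
    },
}

# Collect the fixed IPs the way consumers of network_info walk the model.
fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"]
             if ip["type"] == "fixed"]

print(vif["devname"], vif["address"], fixed_ips)
# -> tap77a205c3-53 fa:16:3e:08:3f:40 ['192.168.128.8']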
[ 1681.332319] env[63024]: DEBUG nova.compute.manager [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1681.335037] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4138816-817c-4e9f-b141-9efd251560f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.623542] env[63024]: DEBUG oslo_concurrency.lockutils [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.078s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1681.628176] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.591s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.630612] env[63024]: INFO nova.compute.claims [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1681.655570] env[63024]: INFO nova.scheduler.client.report [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Deleted allocations for instance 22ef5bae-f7bc-43c7-9d77-1b4547e83b24 [ 1681.729062] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950702, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.603525} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1681.729651] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3/94d9210e-ca8d-4ef1-a640-2d9a11ad87d3.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1681.730107] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1681.731324] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47cb4b3f-cf16-455b-a2d7-b4812b0f9bfb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.739739] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Waiting for the task: (returnval){ [ 1681.739739] env[63024]: value = "task-1950703" [ 1681.739739] env[63024]: _type = "Task" [ 1681.739739] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1681.753901] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950703, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1681.858531] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Acquiring lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1681.858831] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1681.859034] env[63024]: INFO nova.compute.manager [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Attaching volume 227b97fd-a50b-436c-b51c-e971b3da8e00 to /dev/sdb [ 1681.870298] env[63024]: INFO nova.compute.manager [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Took 45.18 seconds to build instance. [ 1681.934827] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d52b93f1-dfa4-4d58-8897-c5eb60ef942a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.943562] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f337bc-12db-457b-8dad-348e891810a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1681.961613] env[63024]: DEBUG nova.virt.block_device [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Updating existing volume attachment record: bf2142d9-0cbf-4f64-b393-a737471dc8c4 {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1682.172271] env[63024]: DEBUG oslo_concurrency.lockutils [None req-272a0e64-f771-421a-823f-80b9a4754f5d tempest-ImagesOneServerTestJSON-1965368258 tempest-ImagesOneServerTestJSON-1965368258-project-member] Lock "22ef5bae-f7bc-43c7-9d77-1b4547e83b24" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.967s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.252906] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950703, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.085357} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1682.253662] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1682.254691] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e1752d1-2fb9-4210-baa3-11eeaa41fc93 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.289575] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Reconfiguring VM instance instance-00000022 to attach disk [datastore1] 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3/94d9210e-ca8d-4ef1-a640-2d9a11ad87d3.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1682.290592] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89e88961-873d-465b-85ea-6920dc1d1b32 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.313129] env[63024]: DEBUG oslo_concurrency.lockutils [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "610dd030-5080-498a-8744-b1411297d70d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.313298] env[63024]: DEBUG oslo_concurrency.lockutils [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "610dd030-5080-498a-8744-b1411297d70d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.313712] env[63024]: DEBUG oslo_concurrency.lockutils [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "610dd030-5080-498a-8744-b1411297d70d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1682.313712] env[63024]: DEBUG oslo_concurrency.lockutils [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "610dd030-5080-498a-8744-b1411297d70d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1682.313835] env[63024]: DEBUG oslo_concurrency.lockutils [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 
tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "610dd030-5080-498a-8744-b1411297d70d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.316910] env[63024]: INFO nova.compute.manager [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Terminating instance [ 1682.320824] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Waiting for the task: (returnval){ [ 1682.320824] env[63024]: value = "task-1950707" [ 1682.320824] env[63024]: _type = "Task" [ 1682.320824] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.336888] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950707, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.377425] env[63024]: DEBUG oslo_concurrency.lockutils [None req-72bf3bc7-1042-4d2d-b5de-247bae98888c tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "18444b47-476a-4ca3-9a4f-0dc58e652143" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.468s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1682.430903] env[63024]: DEBUG oslo_vmware.rw_handles [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52897bf6-a52c-0924-0623-0550e2456f96/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1682.432164] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bd139f-1c30-4189-b4e6-0a54b25efc51 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.440395] env[63024]: DEBUG oslo_vmware.rw_handles [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52897bf6-a52c-0924-0623-0550e2456f96/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1682.440813] env[63024]: ERROR oslo_vmware.rw_handles [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52897bf6-a52c-0924-0623-0550e2456f96/disk-0.vmdk due to incomplete transfer. 
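The lock entries above ('Acquiring lock ... by ...', 'acquired ... :: waited', '"released" ... :: held') are emitted by oslo.concurrency's lockutils wrappers: the synchronized decorator's inner() wrapper (lockutils.py:402/407/421) and the lock() context manager (lockutils.py:310/313/331). A minimal sketch of both forms follows; the function names, lock bodies and the example.vmdk path are placeholders, not nova's actual code.

# Sketch of the two oslo.concurrency locking forms that produce the entries above.
from oslo_concurrency import lockutils

# Decorator form: logs 'Acquiring lock ... by ...' / 'acquired ... :: waited'
# on entry and '"released" ... :: held' on exit (the inner() wrapper).
@lockutils.synchronized('61fdfa06-cb40-44a3-8abc-428b26bd40f5')  # instance UUID as lock name
def do_reserve():
    # critical section, e.g. picking the next free device name for a volume
    return '/dev/sdb'

# Context-manager form: logs 'Acquiring lock ...' / 'Acquired lock ...' /
# 'Releasing lock ...', as seen around the image-cache .vmdk path.
def copy_cached_image(vmdk_path):
    with lockutils.lock(vmdk_path):
        # copy the cached VMDK while holding the per-image lock
        pass

do_reserve()
copy_cached_image('[datastore1] devstack-image-cache_base/example.vmdk')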
[ 1682.440974] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-622edf4e-41fc-40dc-8b2c-a6e66065962b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.451178] env[63024]: DEBUG oslo_vmware.rw_handles [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52897bf6-a52c-0924-0623-0550e2456f96/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1682.451178] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Uploaded image bb186550-d504-4b5e-a6f4-986b2f2973a7 to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1682.452764] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1682.453361] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8418dedd-58de-41b1-9969-965cc37c4a40 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.460337] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1682.460337] env[63024]: value = "task-1950708" [ 1682.460337] env[63024]: _type = "Task" [ 1682.460337] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.471399] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950708, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.829841] env[63024]: DEBUG nova.compute.manager [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1682.830236] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1682.831740] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a0caa5-a736-4f64-b664-7d35dca600ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.844710] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1682.853443] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22424334-1234-4347-8d1e-f868b9a68650 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1682.855311] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950707, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.870168] env[63024]: DEBUG oslo_vmware.api [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1682.870168] env[63024]: value = "task-1950709" [ 1682.870168] env[63024]: _type = "Task" [ 1682.870168] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1682.886895] env[63024]: DEBUG oslo_vmware.api [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950709, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1682.974241] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950708, 'name': Destroy_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.204243] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c383bf-7d4b-4d0d-83ff-c538f697be3d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.214445] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35de2aac-e6a3-44a3-81c0-a10eb0181e8f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.257045] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46cc63df-36b8-4bf2-bd72-cfb9208c5e70 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.266200] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37325313-4aee-421f-8e02-47cde48847e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.284102] env[63024]: DEBUG nova.compute.provider_tree [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1683.342772] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950707, 'name': ReconfigVM_Task, 'duration_secs': 0.565885} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.343701] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Reconfigured VM instance instance-00000022 to attach disk [datastore1] 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3/94d9210e-ca8d-4ef1-a640-2d9a11ad87d3.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1683.345129] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0164016b-c91a-4f28-8592-07d6ff9fd639 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.351294] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Waiting for the task: (returnval){ [ 1683.351294] env[63024]: value = "task-1950710" [ 1683.351294] env[63024]: _type = "Task" [ 1683.351294] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.361728] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950710, 'name': Rename_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.387248] env[63024]: DEBUG oslo_vmware.api [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950709, 'name': PowerOffVM_Task, 'duration_secs': 0.246225} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.387248] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1683.387248] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1683.387248] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21a64e61-f4fd-4129-83e4-34a3ba9dcc26 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.460519] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1683.460912] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1683.461057] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleting the datastore file [datastore1] 610dd030-5080-498a-8744-b1411297d70d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1683.461425] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-41e13b0c-1e08-4bad-8900-513e52767927 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.472992] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950708, 'name': Destroy_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.474586] env[63024]: DEBUG oslo_vmware.api [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1683.474586] env[63024]: value = "task-1950712" [ 1683.474586] env[63024]: _type = "Task" [ 1683.474586] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.789618] env[63024]: DEBUG nova.scheduler.client.report [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1683.863919] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950710, 'name': Rename_Task, 'duration_secs': 0.201058} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.864315] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1683.864779] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9fe0bf69-4fb0-4b93-ac22-f203eaf660bb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.871838] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Waiting for the task: (returnval){ [ 1683.871838] env[63024]: value = "task-1950713" [ 1683.871838] env[63024]: _type = "Task" [ 1683.871838] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.881586] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950713, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1683.974101] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950708, 'name': Destroy_Task, 'duration_secs': 1.367118} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.974516] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Destroyed the VM [ 1683.974723] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1683.975078] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dec4f75f-f5bb-4b49-b6cf-e35995c9f71a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1683.986563] env[63024]: DEBUG oslo_vmware.api [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950712, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292682} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1683.988113] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1683.988322] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1683.988500] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1683.988789] env[63024]: INFO nova.compute.manager [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1683.989041] env[63024]: DEBUG oslo.service.loopingcall [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1683.989271] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1683.989271] env[63024]: value = "task-1950714" [ 1683.989271] env[63024]: _type = "Task" [ 1683.989271] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1683.989754] env[63024]: DEBUG nova.compute.manager [-] [instance: 610dd030-5080-498a-8744-b1411297d70d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1683.989754] env[63024]: DEBUG nova.network.neutron [-] [instance: 610dd030-5080-498a-8744-b1411297d70d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1684.001460] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950714, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.211428] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "8edc24d6-9073-4836-b14b-422df3ac1b88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1684.212202] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "8edc24d6-9073-4836-b14b-422df3ac1b88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.295399] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.667s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1684.296469] env[63024]: DEBUG nova.compute.manager [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1684.301798] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.515s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1684.306030] env[63024]: INFO nova.compute.claims [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1684.390166] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950713, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.505319] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950714, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1684.717351] env[63024]: DEBUG nova.compute.manager [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1684.805199] env[63024]: DEBUG nova.compute.utils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1684.807704] env[63024]: DEBUG nova.compute.manager [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1684.807704] env[63024]: DEBUG nova.network.neutron [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1684.893529] env[63024]: DEBUG oslo_vmware.api [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950713, 'name': PowerOnVM_Task, 'duration_secs': 0.659385} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1684.895255] env[63024]: DEBUG nova.policy [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b5ef55459604a2f8a0170eda5ed5f67', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3a86fdb7cd0e415d9ec099d327fbdca3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1684.896979] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1684.897234] env[63024]: INFO nova.compute.manager [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Took 8.65 seconds to spawn the instance on the hypervisor. [ 1684.897406] env[63024]: DEBUG nova.compute.manager [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1684.898616] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-033f30a6-4a15-4bdb-94bc-8a4b41653778 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.006177] env[63024]: DEBUG oslo_vmware.api [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950714, 'name': RemoveSnapshot_Task, 'duration_secs': 0.825726} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1685.006668] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1685.007892] env[63024]: INFO nova.compute.manager [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Took 16.36 seconds to snapshot the instance on the hypervisor. 
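The RemoveSnapshot_Task, PowerOnVM_Task and DeleteDatastoreFile_Task entries above all follow oslo.vmware's invoke-then-poll pattern: the driver calls a vCenter *_Task method through the API session, gets back a Task managed-object reference immediately, and then blocks in wait_for_task() while _poll_task() logs progress until the task completes or fails. A minimal sketch of that pattern follows; the vCenter address, credentials, poll settings and the snapshot reference are illustrative placeholders, not values taken from this log.

    # Sketch of the invoke-then-poll pattern seen in the entries above.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.org', 'admin', 'secret',
        api_retry_count=10, task_poll_interval=0.5)  # assumed settings

    snapshot_ref = ...  # snapshot managed-object reference, obtained elsewhere

    # Invoking a *_Task method returns a Task moref without blocking.
    task = session.invoke_api(session.vim, 'RemoveSnapshot_Task',
                              snapshot_ref, removeChildren=False)

    # wait_for_task() drives the "_poll_task ... progress is N%" lines:
    # it polls the task info until vCenter reports success, and raises
    # if the task ends in an error state.
    session.wait_for_task(task)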
[ 1685.249634] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.265301] env[63024]: DEBUG nova.network.neutron [-] [instance: 610dd030-5080-498a-8744-b1411297d70d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1685.312767] env[63024]: DEBUG nova.compute.manager [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1685.421167] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "1ad97ed0-2a84-4783-8511-e0f6b24861bd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.421167] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "1ad97ed0-2a84-4783-8511-e0f6b24861bd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.421167] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "1ad97ed0-2a84-4783-8511-e0f6b24861bd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.421167] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "1ad97ed0-2a84-4783-8511-e0f6b24861bd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.421432] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "1ad97ed0-2a84-4783-8511-e0f6b24861bd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.426872] env[63024]: INFO nova.compute.manager [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Terminating instance [ 1685.434498] 
env[63024]: INFO nova.compute.manager [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Took 47.45 seconds to build instance. [ 1685.516741] env[63024]: DEBUG nova.compute.manager [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Instance disappeared during snapshot {{(pid=63024) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4581}} [ 1685.534534] env[63024]: DEBUG nova.compute.manager [None req-937596a1-291a-4c0f-a0d4-2ef4220afec7 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Image not found during clean up bb186550-d504-4b5e-a6f4-986b2f2973a7 {{(pid=63024) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4587}} [ 1685.566036] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.566155] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.597172] env[63024]: DEBUG nova.compute.manager [req-df134e7c-b9ed-42e2-895c-499015610491 req-73213e32-1b41-43bb-94fd-6eb5c38df2aa service nova] [instance: 610dd030-5080-498a-8744-b1411297d70d] Received event network-vif-deleted-9a7cae93-d7ad-4e7c-b18b-00e7e749299e {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1685.770377] env[63024]: INFO nova.compute.manager [-] [instance: 610dd030-5080-498a-8744-b1411297d70d] Took 1.78 seconds to deallocate network for instance. 
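Almost every entry in this stretch is bracketed by oslo.concurrency lock messages: "Acquiring lock ... by ...", "Lock ... acquired by ... :: waited N s" and "Lock ... released by ... :: held N s" are emitted by the wrapper that lockutils places around the decorated callable. A minimal sketch of how such lines are produced follows; the function name and the reuse of an instance UUID from this log are purely illustrative.

    # Sketch of the locking pattern behind the waited/held DEBUG lines above.
    from oslo_concurrency import lockutils

    # Nova-style helper; the prefix only matters for external (file) locks.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Runs with the "compute_resources" semaphore held; the decorator's
        # inner() wrapper logs the Acquiring/acquired/released messages.
        return 'claimed %s' % instance_uuid

    # Per-instance serialization (as in _locked_do_build_and_run_instance)
    # simply uses the instance UUID as the lock name.
    with lockutils.lock('8edc24d6-9073-4836-b14b-422df3ac1b88'):
        pass  # critical section

    instance_claim('8edc24d6-9073-4836-b14b-422df3ac1b88')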
[ 1685.800047] env[63024]: DEBUG nova.network.neutron [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Successfully created port: ab9681ef-e7dc-4992-be61-3ef37483b9b8 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1685.817591] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3242b123-d37b-4f5b-8585-b6ab735fc876 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.830923] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8bf2fa-9e5f-48db-ae3c-dc32e7fabad7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.872109] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beaed87f-4ad8-4918-9ed6-32fdb934c4ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.880766] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37288888-a49b-425f-8bcf-f8fb68c04eb1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.898176] env[63024]: DEBUG nova.compute.provider_tree [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1685.939305] env[63024]: DEBUG nova.compute.manager [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1685.939305] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1685.939305] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1461c93-5ccb-4b20-8394-021f32567991 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lock "94d9210e-ca8d-4ef1-a640-2d9a11ad87d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.303s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1685.939677] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf7bab15-81be-4570-9909-dffc199667f9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1685.952279] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Acquiring lock "3815d381-760d-40fc-98cf-8e6af287007f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1685.952511] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Lock "3815d381-760d-40fc-98cf-8e6af287007f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1685.953711] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1685.954410] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b7503711-0f24-4472-a393-a976bb7121b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.026300] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1686.026447] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1686.026690] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 
tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleting the datastore file [datastore1] 1ad97ed0-2a84-4783-8511-e0f6b24861bd {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1686.026899] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b537744b-9566-429b-b489-13636cb89461 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.037344] env[63024]: DEBUG oslo_vmware.api [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1686.037344] env[63024]: value = "task-1950717" [ 1686.037344] env[63024]: _type = "Task" [ 1686.037344] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.048101] env[63024]: DEBUG oslo_vmware.api [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950717, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.074101] env[63024]: DEBUG nova.compute.manager [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1686.280546] env[63024]: DEBUG oslo_concurrency.lockutils [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.329568] env[63024]: DEBUG nova.compute.manager [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1686.367547] env[63024]: DEBUG nova.virt.hardware [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1686.368083] env[63024]: DEBUG nova.virt.hardware [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1686.368421] env[63024]: DEBUG nova.virt.hardware [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1686.368895] env[63024]: DEBUG nova.virt.hardware [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1686.369239] env[63024]: DEBUG nova.virt.hardware [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1686.369644] env[63024]: DEBUG nova.virt.hardware [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1686.372025] env[63024]: DEBUG nova.virt.hardware [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1686.372025] env[63024]: DEBUG nova.virt.hardware [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1686.372025] env[63024]: DEBUG 
nova.virt.hardware [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1686.372025] env[63024]: DEBUG nova.virt.hardware [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1686.372025] env[63024]: DEBUG nova.virt.hardware [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1686.372397] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7301741-cf84-4730-8c17-5212d00d3fbe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.382724] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7312dba1-0e74-48e8-a12e-76a9d95a7df3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.407412] env[63024]: DEBUG nova.scheduler.client.report [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1686.457168] env[63024]: DEBUG nova.compute.manager [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1686.550857] env[63024]: DEBUG oslo_vmware.api [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950717, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.299776} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1686.552091] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Volume attach. 
Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1686.552673] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402070', 'volume_id': '227b97fd-a50b-436c-b51c-e971b3da8e00', 'name': 'volume-227b97fd-a50b-436c-b51c-e971b3da8e00', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '61fdfa06-cb40-44a3-8abc-428b26bd40f5', 'attached_at': '', 'detached_at': '', 'volume_id': '227b97fd-a50b-436c-b51c-e971b3da8e00', 'serial': '227b97fd-a50b-436c-b51c-e971b3da8e00'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1686.552841] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1686.553097] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1686.555232] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1686.555510] env[63024]: INFO nova.compute.manager [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1686.555820] env[63024]: DEBUG oslo.service.loopingcall [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1686.556688] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8528daab-6d99-44c3-bb49-b2f02d0d7924 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.559800] env[63024]: DEBUG nova.compute.manager [-] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1686.559881] env[63024]: DEBUG nova.network.neutron [-] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1686.583391] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23059c97-e891-47df-bc26-667f27d94dbc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.611540] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Reconfiguring VM instance instance-00000007 to attach disk [datastore1] volume-227b97fd-a50b-436c-b51c-e971b3da8e00/volume-227b97fd-a50b-436c-b51c-e971b3da8e00.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1686.611540] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.611540] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e801fbce-eef5-4e8c-ac25-87e7623b4a8a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1686.630773] env[63024]: DEBUG oslo_vmware.api [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Waiting for the task: (returnval){ [ 1686.630773] env[63024]: value = "task-1950718" [ 1686.630773] env[63024]: _type = "Task" [ 1686.630773] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1686.638926] env[63024]: DEBUG oslo_vmware.api [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Task: {'id': task-1950718, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1686.916794] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.613s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1686.916794] env[63024]: DEBUG nova.compute.manager [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1686.918698] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.768s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.918777] env[63024]: DEBUG nova.objects.instance [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lazy-loading 'resources' on Instance uuid 85d6db13-d317-498e-a36a-972e9b36e82b {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1686.990323] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "37792b57-3347-4134-a060-53359afa3298" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1686.991480] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "37792b57-3347-4134-a060-53359afa3298" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1686.994354] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.141343] env[63024]: DEBUG oslo_vmware.api [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Task: {'id': task-1950718, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.338079] env[63024]: DEBUG oslo_concurrency.lockutils [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquiring lock "94d9210e-ca8d-4ef1-a640-2d9a11ad87d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.338079] env[63024]: DEBUG oslo_concurrency.lockutils [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lock "94d9210e-ca8d-4ef1-a640-2d9a11ad87d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.338402] env[63024]: DEBUG oslo_concurrency.lockutils [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquiring lock "94d9210e-ca8d-4ef1-a640-2d9a11ad87d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.338503] env[63024]: DEBUG oslo_concurrency.lockutils [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lock "94d9210e-ca8d-4ef1-a640-2d9a11ad87d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.338624] env[63024]: DEBUG oslo_concurrency.lockutils [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lock "94d9210e-ca8d-4ef1-a640-2d9a11ad87d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1687.341129] env[63024]: INFO nova.compute.manager [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Terminating instance [ 1687.383535] env[63024]: DEBUG nova.compute.manager [req-42aee67e-de53-4122-83e9-8af3e354f82f req-aab2e945-361d-46d9-8d62-7c38f4f7cde0 service nova] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Received event network-vif-deleted-9f2961a8-afb7-4cf5-8517-52799f5c0bd1 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1687.383786] env[63024]: INFO nova.compute.manager [req-42aee67e-de53-4122-83e9-8af3e354f82f req-aab2e945-361d-46d9-8d62-7c38f4f7cde0 service nova] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Neutron deleted interface 9f2961a8-afb7-4cf5-8517-52799f5c0bd1; detaching it from the instance and deleting it from the info cache [ 1687.384011] env[63024]: DEBUG nova.network.neutron [req-42aee67e-de53-4122-83e9-8af3e354f82f req-aab2e945-361d-46d9-8d62-7c38f4f7cde0 service nova] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Updating 
instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.422923] env[63024]: DEBUG nova.compute.utils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1687.426996] env[63024]: DEBUG nova.compute.manager [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1687.427195] env[63024]: DEBUG nova.network.neutron [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1687.489287] env[63024]: DEBUG nova.policy [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e4425d114dcd4c7eb8a9d7108fde5098', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '39daa869227548e6be78a2244551deea', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1687.498092] env[63024]: DEBUG nova.compute.manager [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1687.652416] env[63024]: DEBUG oslo_vmware.api [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Task: {'id': task-1950718, 'name': ReconfigVM_Task, 'duration_secs': 0.697157} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1687.653852] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Reconfigured VM instance instance-00000007 to attach disk [datastore1] volume-227b97fd-a50b-436c-b51c-e971b3da8e00/volume-227b97fd-a50b-436c-b51c-e971b3da8e00.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1687.660486] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b98f0369-228b-4f21-b6c3-086ac2c04859 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.688218] env[63024]: DEBUG oslo_vmware.api [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Waiting for the task: (returnval){ [ 1687.688218] env[63024]: value = "task-1950719" [ 1687.688218] env[63024]: _type = "Task" [ 1687.688218] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.688983] env[63024]: DEBUG nova.network.neutron [-] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1687.705970] env[63024]: DEBUG oslo_vmware.api [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Task: {'id': task-1950719, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.748199] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "c1fd4146-6dd3-49e9-a744-466e6168e158" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1687.748569] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "c1fd4146-6dd3-49e9-a744-466e6168e158" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1687.818808] env[63024]: DEBUG nova.compute.manager [req-eb192c9e-244c-438f-8514-e2a25f133d74 req-9c5f0acc-e5d1-4fd7-bd59-a93a949cdcff service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Received event network-changed-008138b2-5e34-470d-b8f1-93b1ca8df541 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1687.819017] env[63024]: DEBUG nova.compute.manager [req-eb192c9e-244c-438f-8514-e2a25f133d74 req-9c5f0acc-e5d1-4fd7-bd59-a93a949cdcff service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Refreshing instance network info cache due to event network-changed-008138b2-5e34-470d-b8f1-93b1ca8df541. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1687.819233] env[63024]: DEBUG oslo_concurrency.lockutils [req-eb192c9e-244c-438f-8514-e2a25f133d74 req-9c5f0acc-e5d1-4fd7-bd59-a93a949cdcff service nova] Acquiring lock "refresh_cache-18444b47-476a-4ca3-9a4f-0dc58e652143" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1687.819373] env[63024]: DEBUG oslo_concurrency.lockutils [req-eb192c9e-244c-438f-8514-e2a25f133d74 req-9c5f0acc-e5d1-4fd7-bd59-a93a949cdcff service nova] Acquired lock "refresh_cache-18444b47-476a-4ca3-9a4f-0dc58e652143" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1687.819529] env[63024]: DEBUG nova.network.neutron [req-eb192c9e-244c-438f-8514-e2a25f133d74 req-9c5f0acc-e5d1-4fd7-bd59-a93a949cdcff service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Refreshing network info cache for port 008138b2-5e34-470d-b8f1-93b1ca8df541 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1687.846041] env[63024]: DEBUG nova.compute.manager [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1687.846566] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1687.847433] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f379a508-7bdd-4ed4-8e0e-91340697ecab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.866843] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1687.867386] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf57391e-5bb0-41fa-8a2b-3520048e6977 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.876860] env[63024]: DEBUG oslo_vmware.api [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Waiting for the task: (returnval){ [ 1687.876860] env[63024]: value = "task-1950720" [ 1687.876860] env[63024]: _type = "Task" [ 1687.876860] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1687.889160] env[63024]: DEBUG oslo_vmware.api [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950720, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1687.889387] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b90715bf-9adb-4c69-a3fc-c262cb12ec8e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.901777] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ea1f2e-ed12-40da-837c-98192e807e9a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1687.913745] env[63024]: DEBUG nova.network.neutron [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Successfully updated port: ab9681ef-e7dc-4992-be61-3ef37483b9b8 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1687.938571] env[63024]: DEBUG nova.compute.manager [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1687.941697] env[63024]: DEBUG nova.compute.manager [req-42aee67e-de53-4122-83e9-8af3e354f82f req-aab2e945-361d-46d9-8d62-7c38f4f7cde0 service nova] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Detach interface failed, port_id=9f2961a8-afb7-4cf5-8517-52799f5c0bd1, reason: Instance 1ad97ed0-2a84-4783-8511-e0f6b24861bd could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1688.021579] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.048167] env[63024]: DEBUG nova.network.neutron [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Successfully created port: e33e15ee-eb9d-4eaf-8eb7-845fe940d23d {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1688.050978] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da93e499-d231-4d76-9ee9-bd4bd9f91028 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.060572] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cac135b7-095d-4e81-9094-19a46cc5178f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.093476] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4259212-26c0-4dd0-826c-a12e2c148022 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.102604] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754319a4-6226-41a5-a3c0-59d1b9279dd3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.117070] env[63024]: DEBUG nova.compute.provider_tree [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1688.199280] env[63024]: INFO nova.compute.manager [-] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Took 1.64 seconds to deallocate network for instance. [ 1688.199622] env[63024]: DEBUG oslo_vmware.api [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Task: {'id': task-1950719, 'name': ReconfigVM_Task, 'duration_secs': 0.201768} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.201383] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402070', 'volume_id': '227b97fd-a50b-436c-b51c-e971b3da8e00', 'name': 'volume-227b97fd-a50b-436c-b51c-e971b3da8e00', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '61fdfa06-cb40-44a3-8abc-428b26bd40f5', 'attached_at': '', 'detached_at': '', 'volume_id': '227b97fd-a50b-436c-b51c-e971b3da8e00', 'serial': '227b97fd-a50b-436c-b51c-e971b3da8e00'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1688.368160] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.371493] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1688.390344] env[63024]: DEBUG oslo_vmware.api [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950720, 'name': PowerOffVM_Task, 'duration_secs': 0.221796} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1688.390609] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1688.390771] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1688.391068] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f8007b30-c5b9-4220-ad65-3830d08e9494 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.416914] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquiring lock "refresh_cache-28b3bfc7-2bed-4941-9f48-8bd301e1a971" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1688.417088] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquired lock "refresh_cache-28b3bfc7-2bed-4941-9f48-8bd301e1a971" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1688.417239] env[63024]: DEBUG nova.network.neutron [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1688.464291] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1688.464291] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1688.464291] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Deleting the datastore file [datastore1] 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1688.464291] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6ac89c7c-4f4b-4680-b4d4-933b0bacedee {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.470749] env[63024]: DEBUG oslo_vmware.api [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Waiting for the task: (returnval){ [ 1688.470749] env[63024]: value = "task-1950722" [ 1688.470749] env[63024]: _type = "Task" [ 1688.470749] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1688.484045] env[63024]: DEBUG oslo_vmware.api [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950722, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1688.625158] env[63024]: DEBUG nova.scheduler.client.report [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1688.667507] env[63024]: DEBUG nova.network.neutron [req-eb192c9e-244c-438f-8514-e2a25f133d74 req-9c5f0acc-e5d1-4fd7-bd59-a93a949cdcff service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Updated VIF entry in instance network info cache for port 008138b2-5e34-470d-b8f1-93b1ca8df541. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1688.668791] env[63024]: DEBUG nova.network.neutron [req-eb192c9e-244c-438f-8514-e2a25f133d74 req-9c5f0acc-e5d1-4fd7-bd59-a93a949cdcff service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Updating instance_info_cache with network_info: [{"id": "008138b2-5e34-470d-b8f1-93b1ca8df541", "address": "fa:16:3e:89:7f:c6", "network": {"id": "1c8709f7-097f-4437-bae1-c45d9a4d02f1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1121041191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9981ec11228244fd8b75ee951a940c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap008138b2-5e", "ovs_interfaceid": "008138b2-5e34-470d-b8f1-93b1ca8df541", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1688.709753] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1688.948849] env[63024]: DEBUG nova.compute.manager [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1688.983828] env[63024]: DEBUG nova.virt.hardware [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1688.983828] env[63024]: DEBUG nova.virt.hardware [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1688.984069] env[63024]: DEBUG nova.virt.hardware [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1688.984315] env[63024]: DEBUG nova.virt.hardware [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1688.984858] env[63024]: DEBUG nova.virt.hardware [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1688.984858] env[63024]: DEBUG nova.virt.hardware [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1688.988026] env[63024]: DEBUG nova.virt.hardware [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1688.988026] env[63024]: DEBUG nova.virt.hardware [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1688.988026] env[63024]: DEBUG nova.virt.hardware [None req-38e69794-5eb0-4675-b900-46bb90621faa 
tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1688.988026] env[63024]: DEBUG nova.virt.hardware [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1688.988026] env[63024]: DEBUG nova.virt.hardware [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1688.988477] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d55ea15-1c4d-4574-8de5-0aa99f260bac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1688.995044] env[63024]: DEBUG nova.network.neutron [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1689.005130] env[63024]: DEBUG oslo_vmware.api [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Task: {'id': task-1950722, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.46795} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1689.007286] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74f97a54-8d04-4ef9-a7ab-3142c9c68915 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.012786] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1689.013193] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1689.013493] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1689.013777] env[63024]: INFO nova.compute.manager [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1689.014743] env[63024]: DEBUG oslo.service.loopingcall [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1689.015089] env[63024]: DEBUG nova.compute.manager [-] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1689.015331] env[63024]: DEBUG nova.network.neutron [-] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1689.137598] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.219s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.144018] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 36.360s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.174680] env[63024]: DEBUG oslo_concurrency.lockutils [req-eb192c9e-244c-438f-8514-e2a25f133d74 req-9c5f0acc-e5d1-4fd7-bd59-a93a949cdcff service nova] Releasing lock "refresh_cache-18444b47-476a-4ca3-9a4f-0dc58e652143" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.176626] env[63024]: INFO nova.scheduler.client.report [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Deleted allocations for instance 85d6db13-d317-498e-a36a-972e9b36e82b [ 1689.261102] env[63024]: DEBUG nova.objects.instance [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Lazy-loading 'flavor' on Instance uuid 61fdfa06-cb40-44a3-8abc-428b26bd40f5 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1689.342962] env[63024]: DEBUG nova.network.neutron [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Updating instance_info_cache with network_info: [{"id": "ab9681ef-e7dc-4992-be61-3ef37483b9b8", "address": "fa:16:3e:9b:40:e1", "network": {"id": "05e5c67b-622f-489b-803d-eb380e0adc8c", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-529483603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a86fdb7cd0e415d9ec099d327fbdca3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapab9681ef-e7", "ovs_interfaceid": "ab9681ef-e7dc-4992-be61-3ef37483b9b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.689795] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a6ccd98c-046f-49d4-8435-f0e021b8096f tempest-DeleteServersAdminTestJSON-1602651255 tempest-DeleteServersAdminTestJSON-1602651255-project-member] Lock "85d6db13-d317-498e-a36a-972e9b36e82b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.372s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.776013] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6da2c6f0-6934-4e99-9bfe-309fcc14df69 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.917s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.846729] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Releasing lock "refresh_cache-28b3bfc7-2bed-4941-9f48-8bd301e1a971" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1689.847077] env[63024]: DEBUG nova.compute.manager [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Instance network_info: |[{"id": "ab9681ef-e7dc-4992-be61-3ef37483b9b8", "address": "fa:16:3e:9b:40:e1", "network": {"id": "05e5c67b-622f-489b-803d-eb380e0adc8c", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-529483603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a86fdb7cd0e415d9ec099d327fbdca3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab9681ef-e7", "ovs_interfaceid": "ab9681ef-e7dc-4992-be61-3ef37483b9b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1689.847494] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:40:e1', 
'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aec0089a-ff85-4bef-bad8-c84de39af71a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab9681ef-e7dc-4992-be61-3ef37483b9b8', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1689.855215] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Creating folder: Project (3a86fdb7cd0e415d9ec099d327fbdca3). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1689.857110] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-686481de-75c0-4bd2-93b2-c5dee2e62a07 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.866902] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Created folder: Project (3a86fdb7cd0e415d9ec099d327fbdca3) in parent group-v401959. [ 1689.866902] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Creating folder: Instances. Parent ref: group-v402071. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1689.866902] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d758a8f7-1efa-42c8-9d21-8fa4c9e55c7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.880210] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Created folder: Instances in parent group-v402071. [ 1689.880482] env[63024]: DEBUG oslo.service.loopingcall [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1689.880684] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1689.880904] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe5bb279-04f6-46f6-8b10-b8c1b0fbb97d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1689.897818] env[63024]: DEBUG nova.compute.manager [req-43869318-7bce-4aed-a09c-5dc025e242b9 req-e89c5646-9f75-4280-beb1-a0cc02e8aa89 service nova] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Received event network-vif-plugged-e33e15ee-eb9d-4eaf-8eb7-845fe940d23d {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1689.898192] env[63024]: DEBUG oslo_concurrency.lockutils [req-43869318-7bce-4aed-a09c-5dc025e242b9 req-e89c5646-9f75-4280-beb1-a0cc02e8aa89 service nova] Acquiring lock "cc5cfa6d-d3db-4997-8413-2460e1124f02-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.898369] env[63024]: DEBUG oslo_concurrency.lockutils [req-43869318-7bce-4aed-a09c-5dc025e242b9 req-e89c5646-9f75-4280-beb1-a0cc02e8aa89 service nova] Lock "cc5cfa6d-d3db-4997-8413-2460e1124f02-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.898536] env[63024]: DEBUG oslo_concurrency.lockutils [req-43869318-7bce-4aed-a09c-5dc025e242b9 req-e89c5646-9f75-4280-beb1-a0cc02e8aa89 service nova] Lock "cc5cfa6d-d3db-4997-8413-2460e1124f02-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.898629] env[63024]: DEBUG nova.compute.manager [req-43869318-7bce-4aed-a09c-5dc025e242b9 req-e89c5646-9f75-4280-beb1-a0cc02e8aa89 service nova] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] No waiting events found dispatching network-vif-plugged-e33e15ee-eb9d-4eaf-8eb7-845fe940d23d {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1689.898768] env[63024]: WARNING nova.compute.manager [req-43869318-7bce-4aed-a09c-5dc025e242b9 req-e89c5646-9f75-4280-beb1-a0cc02e8aa89 service nova] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Received unexpected event network-vif-plugged-e33e15ee-eb9d-4eaf-8eb7-845fe940d23d for instance with vm_state building and task_state spawning. [ 1689.903917] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1689.903917] env[63024]: value = "task-1950725" [ 1689.903917] env[63024]: _type = "Task" [ 1689.903917] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1689.917589] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950725, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1689.930820] env[63024]: DEBUG nova.network.neutron [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Successfully updated port: e33e15ee-eb9d-4eaf-8eb7-845fe940d23d {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1689.930820] env[63024]: DEBUG nova.network.neutron [-] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1689.990090] env[63024]: DEBUG nova.compute.manager [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Received event network-vif-plugged-ab9681ef-e7dc-4992-be61-3ef37483b9b8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1689.991212] env[63024]: DEBUG oslo_concurrency.lockutils [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] Acquiring lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1689.991212] env[63024]: DEBUG oslo_concurrency.lockutils [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] Lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1689.991212] env[63024]: DEBUG oslo_concurrency.lockutils [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] Lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1689.991212] env[63024]: DEBUG nova.compute.manager [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] No waiting events found dispatching network-vif-plugged-ab9681ef-e7dc-4992-be61-3ef37483b9b8 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1689.991484] env[63024]: WARNING nova.compute.manager [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Received unexpected event network-vif-plugged-ab9681ef-e7dc-4992-be61-3ef37483b9b8 for instance with vm_state building and task_state spawning. 
[ 1689.991547] env[63024]: DEBUG nova.compute.manager [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Received event network-changed-ab9681ef-e7dc-4992-be61-3ef37483b9b8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1689.991700] env[63024]: DEBUG nova.compute.manager [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Refreshing instance network info cache due to event network-changed-ab9681ef-e7dc-4992-be61-3ef37483b9b8. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1689.991886] env[63024]: DEBUG oslo_concurrency.lockutils [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] Acquiring lock "refresh_cache-28b3bfc7-2bed-4941-9f48-8bd301e1a971" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1689.992051] env[63024]: DEBUG oslo_concurrency.lockutils [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] Acquired lock "refresh_cache-28b3bfc7-2bed-4941-9f48-8bd301e1a971" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1689.992232] env[63024]: DEBUG nova.network.neutron [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Refreshing network info cache for port ab9681ef-e7dc-4992-be61-3ef37483b9b8 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1690.069965] env[63024]: DEBUG oslo_vmware.rw_handles [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522fdc99-229a-950a-fdab-cd9eabca4680/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1690.072074] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58f89a5a-c8d0-4d55-9fe3-fdc9b4d97afc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.078240] env[63024]: DEBUG oslo_vmware.rw_handles [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522fdc99-229a-950a-fdab-cd9eabca4680/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1690.078427] env[63024]: ERROR oslo_vmware.rw_handles [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522fdc99-229a-950a-fdab-cd9eabca4680/disk-0.vmdk due to incomplete transfer. 
[ 1690.078645] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c330ad26-3162-496e-bd6d-f812d3ec2657 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.086273] env[63024]: DEBUG oslo_vmware.rw_handles [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522fdc99-229a-950a-fdab-cd9eabca4680/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1690.086273] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Uploaded image e640f6db-ba55-4e75-9342-de97c265fd58 to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1690.087887] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1690.088225] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c6d91acc-fd14-4875-bf1e-4b4d5de75234 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.093953] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1690.093953] env[63024]: value = "task-1950726" [ 1690.093953] env[63024]: _type = "Task" [ 1690.093953] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.105233] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950726, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.204236] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance ccd80e20-9fc2-415a-a428-fcf85994c7f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.204236] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b629b4f8-f79f-4361-b78c-8705a6888a9e is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1690.204236] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 61fdfa06-cb40-44a3-8abc-428b26bd40f5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.204236] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.204774] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 7146277f-2621-4e8f-a14c-49bf4dd052db actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.204774] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.204774] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b765b8b3-a099-4e23-be30-d1178ecffc37 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.204774] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.204886] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance bd07735a-6a75-45fb-9cef-e1f2c301a489 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.204886] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9cf45c3a-2a74-4f8e-8817-47bbd748a44b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.204886] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance d49eae54-cccb-4281-aaa0-d6974529eb7b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1690.204974] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.207265] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b7f26f0e-d5a9-42a6-8af2-065659f89cf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.207265] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1690.207265] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e8ad74ce-7862-4574-98e7-14bc54bd5d6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.207265] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 00e925a1-9b79-46e2-b7f7-c0b63e1e72df is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1690.207265] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 1ad97ed0-2a84-4783-8511-e0f6b24861bd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1690.207534] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 610dd030-5080-498a-8744-b1411297d70d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1690.207534] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 8a826350-0fee-409d-a3fc-260d7d43bdf6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.207534] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 18444b47-476a-4ca3-9a4f-0dc58e652143 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.207534] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.207651] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 28b3bfc7-2bed-4941-9f48-8bd301e1a971 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.207651] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance cc5cfa6d-d3db-4997-8413-2460e1124f02 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1690.417029] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950725, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.438448] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Acquiring lock "refresh_cache-cc5cfa6d-d3db-4997-8413-2460e1124f02" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.438448] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Acquired lock "refresh_cache-cc5cfa6d-d3db-4997-8413-2460e1124f02" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.438448] env[63024]: DEBUG nova.network.neutron [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1690.441113] env[63024]: INFO nova.compute.manager [-] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Took 1.43 seconds to deallocate network for instance. 
[ 1690.608746] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950726, 'name': Destroy_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.710133] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9679a1a2-b003-4a60-a812-8b3a9b5f545f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1690.744581] env[63024]: DEBUG nova.network.neutron [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Updated VIF entry in instance network info cache for port ab9681ef-e7dc-4992-be61-3ef37483b9b8. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1690.744581] env[63024]: DEBUG nova.network.neutron [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Updating instance_info_cache with network_info: [{"id": "ab9681ef-e7dc-4992-be61-3ef37483b9b8", "address": "fa:16:3e:9b:40:e1", "network": {"id": "05e5c67b-622f-489b-803d-eb380e0adc8c", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-529483603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a86fdb7cd0e415d9ec099d327fbdca3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab9681ef-e7", "ovs_interfaceid": "ab9681ef-e7dc-4992-be61-3ef37483b9b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1690.819650] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.819997] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1690.914667] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950725, 'name': CreateVM_Task, 'duration_secs': 0.562313} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1690.914844] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1690.915586] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1690.915760] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1690.916194] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1690.916771] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9dcb836c-a13b-4baf-9642-13ad924550ea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1690.921827] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for the task: (returnval){ [ 1690.921827] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5290bf05-7c72-7957-d78f-0efeda24fabe" [ 1690.921827] env[63024]: _type = "Task" [ 1690.921827] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1690.930165] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5290bf05-7c72-7957-d78f-0efeda24fabe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1690.951600] env[63024]: DEBUG oslo_concurrency.lockutils [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1690.984781] env[63024]: DEBUG nova.network.neutron [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1691.105773] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950726, 'name': Destroy_Task, 'duration_secs': 0.633013} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.106202] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Destroyed the VM [ 1691.106259] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1691.108681] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-7946ac70-262b-479b-a53a-fbb3dcdcd42f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.115301] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1691.115301] env[63024]: value = "task-1950727" [ 1691.115301] env[63024]: _type = "Task" [ 1691.115301] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.123821] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950727, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.143581] env[63024]: DEBUG nova.network.neutron [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Updating instance_info_cache with network_info: [{"id": "e33e15ee-eb9d-4eaf-8eb7-845fe940d23d", "address": "fa:16:3e:59:7d:46", "network": {"id": "e63bb01d-1b14-4d82-8937-f992a14144ab", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1976859150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39daa869227548e6be78a2244551deea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6e2a9679-6746-40f2-951c-65fcd1af5f7b", "external-id": "nsx-vlan-transportzone-39", "segmentation_id": 39, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape33e15ee-eb", "ovs_interfaceid": "e33e15ee-eb9d-4eaf-8eb7-845fe940d23d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1691.217194] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance f6fddc23-ad36-4d6f-82a2-ded456b2596e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1691.249102] env[63024]: DEBUG oslo_concurrency.lockutils [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] Releasing lock "refresh_cache-28b3bfc7-2bed-4941-9f48-8bd301e1a971" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.249409] env[63024]: DEBUG nova.compute.manager [req-71c66627-3b7d-4800-92be-f7f13e9fa539 req-0710e6cb-b540-42d1-b7d2-8b6dad76cba8 service nova] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Received event network-vif-deleted-77a205c3-534e-4d19-9df6-e9009252110a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1691.259105] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Acquiring lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.259105] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.433699] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5290bf05-7c72-7957-d78f-0efeda24fabe, 'name': SearchDatastore_Task, 'duration_secs': 0.010145} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.434334] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.434733] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1691.435136] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.435668] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.436075] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1691.436580] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1b9cc83c-3c70-4096-bbb1-7eae6e13d5f4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.448040] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1691.448040] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1691.452044] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03bdd1a3-c6dc-4a24-ad41-093ed198d6ae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.455162] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for the task: (returnval){ [ 1691.455162] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5227e809-15d4-d4b0-2a5e-9f119a088ff2" [ 1691.455162] env[63024]: _type = "Task" [ 1691.455162] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.465804] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5227e809-15d4-d4b0-2a5e-9f119a088ff2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.631484] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950727, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.634394] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1691.634922] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1691.646013] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Releasing lock "refresh_cache-cc5cfa6d-d3db-4997-8413-2460e1124f02" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1691.646314] env[63024]: DEBUG nova.compute.manager [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Instance network_info: |[{"id": "e33e15ee-eb9d-4eaf-8eb7-845fe940d23d", "address": "fa:16:3e:59:7d:46", "network": {"id": "e63bb01d-1b14-4d82-8937-f992a14144ab", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1976859150-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39daa869227548e6be78a2244551deea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6e2a9679-6746-40f2-951c-65fcd1af5f7b", "external-id": "nsx-vlan-transportzone-39", "segmentation_id": 39, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape33e15ee-eb", "ovs_interfaceid": "e33e15ee-eb9d-4eaf-8eb7-845fe940d23d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1691.646682] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:7d:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6e2a9679-6746-40f2-951c-65fcd1af5f7b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e33e15ee-eb9d-4eaf-8eb7-845fe940d23d', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1691.654354] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Creating folder: Project (39daa869227548e6be78a2244551deea). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1691.655077] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a97efd0e-c2c3-402c-8158-2979720a5c30 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.663880] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Created folder: Project (39daa869227548e6be78a2244551deea) in parent group-v401959. [ 1691.664094] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Creating folder: Instances. Parent ref: group-v402074. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1691.664733] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3aba237-3453-489c-928d-ce5f8d07bad5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.673205] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Created folder: Instances in parent group-v402074. 
[ 1691.673443] env[63024]: DEBUG oslo.service.loopingcall [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1691.673622] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1691.673825] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d812b311-018a-495a-b57e-15856865da00 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.694390] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1691.694390] env[63024]: value = "task-1950730" [ 1691.694390] env[63024]: _type = "Task" [ 1691.694390] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1691.704981] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950730, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1691.717538] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 650a97b9-911e-44b0-9e82-a6d4cc95c9dd has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1691.762556] env[63024]: INFO nova.compute.manager [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Detaching volume 227b97fd-a50b-436c-b51c-e971b3da8e00 [ 1691.884569] env[63024]: INFO nova.virt.block_device [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Attempting to driver detach volume 227b97fd-a50b-436c-b51c-e971b3da8e00 from mountpoint /dev/sdb [ 1691.884863] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1691.885095] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402070', 'volume_id': '227b97fd-a50b-436c-b51c-e971b3da8e00', 'name': 'volume-227b97fd-a50b-436c-b51c-e971b3da8e00', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '61fdfa06-cb40-44a3-8abc-428b26bd40f5', 'attached_at': '', 'detached_at': '', 'volume_id': '227b97fd-a50b-436c-b51c-e971b3da8e00', 'serial': '227b97fd-a50b-436c-b51c-e971b3da8e00'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1691.886072] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2ee5ae-e4e4-4ce2-ba42-9d43e93cfacb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.910549] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aaf49a1-8e71-4867-8df8-cd30966d2474 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.920147] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d219fe48-278e-4215-b23b-aba812526940 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.930840] env[63024]: DEBUG nova.compute.manager [req-718299a6-4959-43fc-991a-2ece7332c65c req-171cb269-e9ab-44c0-91e3-a9fbc2ea1ec5 service nova] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Received event network-changed-e33e15ee-eb9d-4eaf-8eb7-845fe940d23d {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1691.931075] env[63024]: DEBUG nova.compute.manager [req-718299a6-4959-43fc-991a-2ece7332c65c req-171cb269-e9ab-44c0-91e3-a9fbc2ea1ec5 service nova] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Refreshing instance network info cache due to event network-changed-e33e15ee-eb9d-4eaf-8eb7-845fe940d23d. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1691.931299] env[63024]: DEBUG oslo_concurrency.lockutils [req-718299a6-4959-43fc-991a-2ece7332c65c req-171cb269-e9ab-44c0-91e3-a9fbc2ea1ec5 service nova] Acquiring lock "refresh_cache-cc5cfa6d-d3db-4997-8413-2460e1124f02" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1691.931436] env[63024]: DEBUG oslo_concurrency.lockutils [req-718299a6-4959-43fc-991a-2ece7332c65c req-171cb269-e9ab-44c0-91e3-a9fbc2ea1ec5 service nova] Acquired lock "refresh_cache-cc5cfa6d-d3db-4997-8413-2460e1124f02" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1691.931591] env[63024]: DEBUG nova.network.neutron [req-718299a6-4959-43fc-991a-2ece7332c65c req-171cb269-e9ab-44c0-91e3-a9fbc2ea1ec5 service nova] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Refreshing network info cache for port e33e15ee-eb9d-4eaf-8eb7-845fe940d23d {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1691.953528] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7af895-44fc-4fd8-b018-51aeb39b051f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.967149] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5227e809-15d4-d4b0-2a5e-9f119a088ff2, 'name': SearchDatastore_Task, 'duration_secs': 0.008124} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1691.978846] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] The volume has not been displaced from its original location: [datastore1] volume-227b97fd-a50b-436c-b51c-e971b3da8e00/volume-227b97fd-a50b-436c-b51c-e971b3da8e00.vmdk. No consolidation needed. 
{{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1691.984743] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Reconfiguring VM instance instance-00000007 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1691.985109] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eea12d3a-3ed9-48e9-8156-a1e006d8b56e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1691.991144] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c827e8db-4da7-4edd-a9ab-f892bfdd9dda {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.009325] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for the task: (returnval){ [ 1692.009325] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b04c66-f44d-e7e5-2c05-5f6c7ed544a3" [ 1692.009325] env[63024]: _type = "Task" [ 1692.009325] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.010860] env[63024]: DEBUG oslo_vmware.api [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Waiting for the task: (returnval){ [ 1692.010860] env[63024]: value = "task-1950731" [ 1692.010860] env[63024]: _type = "Task" [ 1692.010860] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.026068] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b04c66-f44d-e7e5-2c05-5f6c7ed544a3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.029165] env[63024]: DEBUG oslo_vmware.api [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Task: {'id': task-1950731, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.130986] env[63024]: DEBUG oslo_vmware.api [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950727, 'name': RemoveSnapshot_Task, 'duration_secs': 0.539964} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.130986] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1692.130986] env[63024]: INFO nova.compute.manager [None req-037616c5-d6cf-4b90-bf58-aa2ad2350df3 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Took 16.33 seconds to snapshot the instance on the hypervisor. [ 1692.205512] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950730, 'name': CreateVM_Task, 'duration_secs': 0.354813} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.205687] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1692.206403] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1692.206626] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1692.206859] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1692.207141] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-764026e9-cebc-445f-a813-a9d54550fc78 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.211763] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Waiting for the task: (returnval){ [ 1692.211763] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f04dbd-b030-0568-b6c1-bc37b6cde090" [ 1692.211763] env[63024]: _type = "Task" [ 1692.211763] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.220660] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 49eb6292-012a-4296-aff8-9c460866a602 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.221928] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f04dbd-b030-0568-b6c1-bc37b6cde090, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.529504] env[63024]: DEBUG oslo_vmware.api [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Task: {'id': task-1950731, 'name': ReconfigVM_Task, 'duration_secs': 0.347834} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.529504] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b04c66-f44d-e7e5-2c05-5f6c7ed544a3, 'name': SearchDatastore_Task, 'duration_secs': 0.019656} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.529504] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Reconfigured VM instance instance-00000007 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1692.534355] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.534355] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 28b3bfc7-2bed-4941-9f48-8bd301e1a971/28b3bfc7-2bed-4941-9f48-8bd301e1a971.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1692.534524] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e6d7b78-c4d8-428e-8260-9982a61a90e5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.550557] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b380727-404e-42bd-a86d-d6273c6a8260 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.555223] env[63024]: DEBUG oslo_vmware.api [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Waiting for the task: (returnval){ [ 1692.555223] env[63024]: value = "task-1950732" [ 1692.555223] env[63024]: _type = "Task" [ 1692.555223] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.556488] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for the task: (returnval){ [ 1692.556488] env[63024]: value = "task-1950733" [ 1692.556488] env[63024]: _type = "Task" [ 1692.556488] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.568772] env[63024]: DEBUG oslo_vmware.api [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Task: {'id': task-1950732, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.573779] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950733, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1692.679587] env[63024]: DEBUG nova.network.neutron [req-718299a6-4959-43fc-991a-2ece7332c65c req-171cb269-e9ab-44c0-91e3-a9fbc2ea1ec5 service nova] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Updated VIF entry in instance network info cache for port e33e15ee-eb9d-4eaf-8eb7-845fe940d23d. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1692.680082] env[63024]: DEBUG nova.network.neutron [req-718299a6-4959-43fc-991a-2ece7332c65c req-171cb269-e9ab-44c0-91e3-a9fbc2ea1ec5 service nova] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Updating instance_info_cache with network_info: [{"id": "e33e15ee-eb9d-4eaf-8eb7-845fe940d23d", "address": "fa:16:3e:59:7d:46", "network": {"id": "e63bb01d-1b14-4d82-8937-f992a14144ab", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1976859150-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "39daa869227548e6be78a2244551deea", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6e2a9679-6746-40f2-951c-65fcd1af5f7b", "external-id": "nsx-vlan-transportzone-39", "segmentation_id": 39, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape33e15ee-eb", "ovs_interfaceid": "e33e15ee-eb9d-4eaf-8eb7-845fe940d23d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1692.724331] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 8edc24d6-9073-4836-b14b-422df3ac1b88 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1692.727436] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f04dbd-b030-0568-b6c1-bc37b6cde090, 'name': SearchDatastore_Task, 'duration_secs': 0.03152} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1692.728584] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1692.728584] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1692.731503] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1692.731503] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1692.731503] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1692.731503] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f531a385-ddf9-48d6-b6ba-ba44bef672c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.739724] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1692.739901] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1692.740650] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2065be2-3352-4de4-b361-8eaebc1163bd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1692.747303] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Waiting for the task: (returnval){ [ 1692.747303] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5275d3eb-627c-b221-6de5-bd7a6f0c9011" [ 1692.747303] env[63024]: _type = "Task" [ 1692.747303] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1692.757636] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5275d3eb-627c-b221-6de5-bd7a6f0c9011, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.070111] env[63024]: DEBUG oslo_vmware.api [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Task: {'id': task-1950732, 'name': ReconfigVM_Task, 'duration_secs': 0.141657} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.073417] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402070', 'volume_id': '227b97fd-a50b-436c-b51c-e971b3da8e00', 'name': 'volume-227b97fd-a50b-436c-b51c-e971b3da8e00', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '61fdfa06-cb40-44a3-8abc-428b26bd40f5', 'attached_at': '', 'detached_at': '', 'volume_id': '227b97fd-a50b-436c-b51c-e971b3da8e00', 'serial': '227b97fd-a50b-436c-b51c-e971b3da8e00'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1693.075287] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950733, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.182892] env[63024]: DEBUG oslo_concurrency.lockutils [req-718299a6-4959-43fc-991a-2ece7332c65c req-171cb269-e9ab-44c0-91e3-a9fbc2ea1ec5 service nova] Releasing lock "refresh_cache-cc5cfa6d-d3db-4997-8413-2460e1124f02" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.229593] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance df2933d1-32c3-48a6-8ceb-d5e3047d0b78 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1693.264292] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5275d3eb-627c-b221-6de5-bd7a6f0c9011, 'name': SearchDatastore_Task, 'duration_secs': 0.017344} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.265258] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-651b160f-4d1d-4800-9e37-c51aeeaf9118 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.271335] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Waiting for the task: (returnval){ [ 1693.271335] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52af442a-835c-dadf-46c2-d4f674e99d79" [ 1693.271335] env[63024]: _type = "Task" [ 1693.271335] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.282397] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52af442a-835c-dadf-46c2-d4f674e99d79, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.571602] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950733, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.991101} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.571889] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 28b3bfc7-2bed-4941-9f48-8bd301e1a971/28b3bfc7-2bed-4941-9f48-8bd301e1a971.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1693.572147] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1693.572395] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-87de5684-35fa-4450-a74a-dcda36614620 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.579848] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for the task: (returnval){ [ 1693.579848] env[63024]: value = "task-1950734" [ 1693.579848] env[63024]: _type = "Task" [ 1693.579848] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.587215] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950734, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1693.630625] env[63024]: DEBUG nova.objects.instance [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Lazy-loading 'flavor' on Instance uuid 61fdfa06-cb40-44a3-8abc-428b26bd40f5 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1693.732865] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 3815d381-760d-40fc-98cf-8e6af287007f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1693.783250] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52af442a-835c-dadf-46c2-d4f674e99d79, 'name': SearchDatastore_Task, 'duration_secs': 0.061876} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1693.783939] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1693.784409] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] cc5cfa6d-d3db-4997-8413-2460e1124f02/cc5cfa6d-d3db-4997-8413-2460e1124f02.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1693.784807] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7fdbd7ac-cf68-42ce-a306-a77b6d9b1a16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.793019] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Waiting for the task: (returnval){ [ 1693.793019] env[63024]: value = "task-1950735" [ 1693.793019] env[63024]: _type = "Task" [ 1693.793019] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.804571] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950735, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.087517] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950734, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064087} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.088176] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1694.088726] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db6cffc-de9d-45a9-af35-4ef52c501e44 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.115456] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Reconfiguring VM instance instance-00000023 to attach disk [datastore1] 28b3bfc7-2bed-4941-9f48-8bd301e1a971/28b3bfc7-2bed-4941-9f48-8bd301e1a971.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1694.115456] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b41c17b8-790a-4a0b-bf31-edd82ee6ac0b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.139592] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for the task: (returnval){ [ 1694.139592] env[63024]: value = "task-1950736" [ 1694.139592] env[63024]: _type = "Task" [ 1694.139592] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.146358] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950736, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.237132] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 37792b57-3347-4134-a060-53359afa3298 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1694.304281] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950735, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.647463] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b7c629ff-1b8c-4a8b-9447-1ce2f3d21a62 tempest-VolumesAssistedSnapshotsTest-1116976750 tempest-VolumesAssistedSnapshotsTest-1116976750-project-admin] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.389s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1694.653760] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950736, 'name': ReconfigVM_Task, 'duration_secs': 0.440232} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.654101] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Reconfigured VM instance instance-00000023 to attach disk [datastore1] 28b3bfc7-2bed-4941-9f48-8bd301e1a971/28b3bfc7-2bed-4941-9f48-8bd301e1a971.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1694.654865] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e7c323c-2f90-4b9f-acaa-8ca0d181e0ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.660893] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for the task: (returnval){ [ 1694.660893] env[63024]: value = "task-1950738" [ 1694.660893] env[63024]: _type = "Task" [ 1694.660893] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.669317] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950738, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.740705] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance c1fd4146-6dd3-49e9-a744-466e6168e158 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1694.804964] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950735, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766497} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.805311] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] cc5cfa6d-d3db-4997-8413-2460e1124f02/cc5cfa6d-d3db-4997-8413-2460e1124f02.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1694.805540] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1694.805798] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3773c75a-b9d3-4fe4-86ef-57cff0755500 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.812785] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Waiting for the task: (returnval){ [ 1694.812785] env[63024]: value = "task-1950739" [ 1694.812785] env[63024]: _type = "Task" [ 1694.812785] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.824857] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950739, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.171721] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950738, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.245084] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 82b7019c-5049-4b8b-abb4-46f326ce3d5b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1695.245084] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 17 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1695.245084] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3840MB phys_disk=200GB used_disk=17GB total_vcpus=48 used_vcpus=17 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1695.325140] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950739, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061868} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1695.325140] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1695.325897] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd9bb645-a2e5-4851-ab9e-fce03788f405 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.353873] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] cc5cfa6d-d3db-4997-8413-2460e1124f02/cc5cfa6d-d3db-4997-8413-2460e1124f02.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1695.356962] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6942bf0-bd72-495e-9060-b3db3ab8fb51 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.380493] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Waiting for the task: (returnval){ [ 1695.380493] env[63024]: value = "task-1950740" [ 1695.380493] env[63024]: _type = "Task" [ 1695.380493] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1695.380904] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "bd07735a-6a75-45fb-9cef-e1f2c301a489" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.381145] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "bd07735a-6a75-45fb-9cef-e1f2c301a489" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.382051] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "bd07735a-6a75-45fb-9cef-e1f2c301a489-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1695.382051] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "bd07735a-6a75-45fb-9cef-e1f2c301a489-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1695.382051] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "bd07735a-6a75-45fb-9cef-e1f2c301a489-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1695.388141] env[63024]: INFO nova.compute.manager [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Terminating instance [ 1695.397038] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950740, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.673782] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950738, 'name': Rename_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1695.789131] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26a45194-a558-47dd-aaca-dab91437a25a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.796449] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e671aeb3-f07b-4418-bd4f-3d389e74c661 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.587817] env[63024]: DEBUG nova.compute.manager [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1696.588078] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1696.592452] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f998fa3-e152-4fa6-98e8-2f1dcdeae072 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.602828] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950740, 'name': ReconfigVM_Task, 'duration_secs': 0.261856} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.630388] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Reconfigured VM instance instance-00000024 to attach disk [datastore1] cc5cfa6d-d3db-4997-8413-2460e1124f02/cc5cfa6d-d3db-4997-8413-2460e1124f02.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1696.631303] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1696.631544] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950738, 'name': Rename_Task, 'duration_secs': 1.110791} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1696.631739] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-493d1700-f912-487a-b992-9e11dee6ef91 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.633783] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e853e2ef-4031-428c-b362-5d126edd9955 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.636033] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-926e264e-3b0e-44a3-a40d-b1758d7df945 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.637494] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1696.637699] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2440593-aa33-4255-bc19-19d8d1cd09bc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.646843] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-161d4c14-9997-4ec6-9094-23a5d61b34ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.650707] env[63024]: DEBUG oslo_vmware.api [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1696.650707] env[63024]: value = "task-1950745" [ 1696.650707] env[63024]: _type = "Task" [ 1696.650707] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.650961] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for the task: (returnval){ [ 1696.650961] env[63024]: value = "task-1950746" [ 1696.650961] env[63024]: _type = "Task" [ 1696.650961] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.652151] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Waiting for the task: (returnval){ [ 1696.652151] env[63024]: value = "task-1950744" [ 1696.652151] env[63024]: _type = "Task" [ 1696.652151] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1696.669173] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1696.676337] env[63024]: DEBUG oslo_vmware.api [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950745, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.682672] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950746, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1696.682947] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950744, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.166751] env[63024]: DEBUG oslo_vmware.api [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950745, 'name': PowerOffVM_Task, 'duration_secs': 0.201431} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.169895] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1697.170108] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1697.170392] env[63024]: DEBUG oslo_vmware.api [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950746, 'name': PowerOnVM_Task, 'duration_secs': 0.457196} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.170630] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb233380-9764-424c-a7cb-e991e67433a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.172079] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1697.172284] env[63024]: INFO nova.compute.manager [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Took 10.84 seconds to spawn the instance on the hypervisor. [ 1697.172457] env[63024]: DEBUG nova.compute.manager [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1697.175881] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1697.179718] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e61304-86ae-4dca-984a-598e421fab0a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.182284] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950744, 'name': Rename_Task, 'duration_secs': 0.184943} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.183108] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1697.183619] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-730cc811-1e8c-4ca3-bc5b-27f25133f5e0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.193102] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Waiting for the task: (returnval){ [ 1697.193102] env[63024]: value = "task-1950748" [ 1697.193102] env[63024]: _type = "Task" [ 1697.193102] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.202424] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950748, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.258717] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1697.258943] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1697.259140] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Deleting the datastore file [datastore1] bd07735a-6a75-45fb-9cef-e1f2c301a489 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1697.259407] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4b6c6ae-c927-4617-a949-eaf4b220fa2f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.266710] env[63024]: DEBUG oslo_vmware.api [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1697.266710] env[63024]: value = "task-1950749" [ 1697.266710] env[63024]: _type = "Task" [ 1697.266710] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1697.274737] env[63024]: DEBUG oslo_vmware.api [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950749, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1697.683590] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1697.683899] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 8.543s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.684191] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.336s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1697.684379] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1697.686527] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.024s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1697.687935] env[63024]: INFO nova.compute.claims [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1697.708146] env[63024]: INFO nova.compute.manager [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Took 50.70 seconds to build instance. [ 1697.716738] env[63024]: DEBUG oslo_vmware.api [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950748, 'name': PowerOnVM_Task, 'duration_secs': 0.472396} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.717267] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1697.717466] env[63024]: INFO nova.compute.manager [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Took 8.77 seconds to spawn the instance on the hypervisor. [ 1697.719397] env[63024]: DEBUG nova.compute.manager [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1697.721075] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3462e2c7-bb68-4428-b9d2-711f4199bc6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1697.727873] env[63024]: INFO nova.scheduler.client.report [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Deleted allocations for instance d49eae54-cccb-4281-aaa0-d6974529eb7b [ 1697.783178] env[63024]: DEBUG oslo_vmware.api [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950749, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126729} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1697.783599] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1697.783875] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1697.784185] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1697.784452] env[63024]: INFO nova.compute.manager [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Took 1.20 seconds to destroy the instance on the hypervisor. 
[ 1697.784828] env[63024]: DEBUG oslo.service.loopingcall [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1697.785138] env[63024]: DEBUG nova.compute.manager [-] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1697.785301] env[63024]: DEBUG nova.network.neutron [-] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1698.067204] env[63024]: DEBUG nova.compute.manager [req-739fa783-cfd8-44d3-977f-f68eada649a2 req-7fb6337d-a454-4cb5-97c1-040734a52fa5 service nova] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Received event network-vif-deleted-4a4fca95-5ff6-49cc-8848-f863d762cb8c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1698.067437] env[63024]: INFO nova.compute.manager [req-739fa783-cfd8-44d3-977f-f68eada649a2 req-7fb6337d-a454-4cb5-97c1-040734a52fa5 service nova] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Neutron deleted interface 4a4fca95-5ff6-49cc-8848-f863d762cb8c; detaching it from the instance and deleting it from the info cache [ 1698.067553] env[63024]: DEBUG nova.network.neutron [req-739fa783-cfd8-44d3-977f-f68eada649a2 req-7fb6337d-a454-4cb5-97c1-040734a52fa5 service nova] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.138125] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Acquiring lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.138410] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.138628] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Acquiring lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1698.138834] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Lock 
"61fdfa06-cb40-44a3-8abc-428b26bd40f5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1698.138967] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.141244] env[63024]: INFO nova.compute.manager [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Terminating instance [ 1698.218242] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3fbd6312-a262-479d-bc95-8c6c4f60b688 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.308s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.247258] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00a457ee-1f38-45d0-a954-b2bf3d01c737 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "d49eae54-cccb-4281-aaa0-d6974529eb7b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.263s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1698.260399] env[63024]: INFO nova.compute.manager [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Took 48.50 seconds to build instance. [ 1698.546604] env[63024]: DEBUG nova.network.neutron [-] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.570772] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8d568af3-0b44-4720-b2d3-87e48f0823c3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.585720] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ec4f99c-31a0-4387-b8ee-651599bffe8b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.620496] env[63024]: DEBUG nova.compute.manager [req-739fa783-cfd8-44d3-977f-f68eada649a2 req-7fb6337d-a454-4cb5-97c1-040734a52fa5 service nova] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Detach interface failed, port_id=4a4fca95-5ff6-49cc-8848-f863d762cb8c, reason: Instance bd07735a-6a75-45fb-9cef-e1f2c301a489 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1698.648026] env[63024]: DEBUG nova.compute.manager [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1698.648026] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1698.648026] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01dd2b1b-f5ed-4f1c-b9a9-eb177e756435 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.656636] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1698.656636] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1825ba22-09bb-410e-938f-58de0b058b4d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.663006] env[63024]: DEBUG oslo_vmware.api [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Waiting for the task: (returnval){ [ 1698.663006] env[63024]: value = "task-1950751" [ 1698.663006] env[63024]: _type = "Task" [ 1698.663006] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.672484] env[63024]: DEBUG oslo_vmware.api [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950751, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.689521] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1698.689874] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1698.722495] env[63024]: DEBUG nova.compute.manager [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1698.763778] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38e69794-5eb0-4675-b900-46bb90621faa tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Lock "cc5cfa6d-d3db-4997-8413-2460e1124f02" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.252s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.050180] env[63024]: INFO nova.compute.manager [-] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Took 1.26 seconds to deallocate network for instance. [ 1699.082664] env[63024]: DEBUG oslo_concurrency.lockutils [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "ccd80e20-9fc2-415a-a428-fcf85994c7f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.082917] env[63024]: DEBUG oslo_concurrency.lockutils [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "ccd80e20-9fc2-415a-a428-fcf85994c7f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.084226] env[63024]: DEBUG oslo_concurrency.lockutils [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "ccd80e20-9fc2-415a-a428-fcf85994c7f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.084226] env[63024]: DEBUG oslo_concurrency.lockutils [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "ccd80e20-9fc2-415a-a428-fcf85994c7f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.084226] env[63024]: DEBUG oslo_concurrency.lockutils [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "ccd80e20-9fc2-415a-a428-fcf85994c7f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.088217] env[63024]: INFO nova.compute.manager [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Terminating instance [ 1699.151847] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquiring lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971" by 
"nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.151847] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.152092] env[63024]: INFO nova.compute.manager [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Rebooting instance [ 1699.154949] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34346a22-762a-490a-9bb0-3b0b59551128 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.163692] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4240f4f6-ba0e-408b-8812-a1ab9ac8ccc8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.176117] env[63024]: DEBUG oslo_vmware.api [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950751, 'name': PowerOffVM_Task, 'duration_secs': 0.34622} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.199514] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1699.199714] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1699.206360] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e36244fd-93db-4a1d-8761-e9e301fdaa17 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.208562] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b64be95-5c7d-4509-b878-3d5f557ad858 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.211101] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1699.211359] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None 
None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1699.218873] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4fb3c75-ab45-4a39-83ee-234c5e5b450f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.235279] env[63024]: DEBUG nova.compute.provider_tree [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1699.248277] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.268903] env[63024]: DEBUG nova.compute.manager [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1699.294028] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1699.294296] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1699.294428] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Deleting the datastore file [datastore1] 61fdfa06-cb40-44a3-8abc-428b26bd40f5 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1699.294709] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bcb13918-aa60-4811-841d-a2ac33ecc590 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.302427] env[63024]: DEBUG oslo_vmware.api [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Waiting for the task: (returnval){ [ 1699.302427] env[63024]: value = "task-1950753" [ 1699.302427] env[63024]: _type = "Task" [ 1699.302427] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.312979] env[63024]: DEBUG oslo_vmware.api [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950753, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.557704] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.592716] env[63024]: DEBUG nova.compute.manager [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1699.593035] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1699.593859] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41d1a292-b331-4390-acb9-5a187ec58f87 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.601752] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1699.602007] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd5c771b-7bcd-44b0-9d53-8ec6f1784cfd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1699.607854] env[63024]: DEBUG oslo_vmware.api [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1699.607854] env[63024]: value = "task-1950754" [ 1699.607854] env[63024]: _type = "Task" [ 1699.607854] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1699.616379] env[63024]: DEBUG oslo_vmware.api [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950754, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.684480] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquiring lock "refresh_cache-28b3bfc7-2bed-4941-9f48-8bd301e1a971" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1699.684598] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquired lock "refresh_cache-28b3bfc7-2bed-4941-9f48-8bd301e1a971" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1699.684763] env[63024]: DEBUG nova.network.neutron [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1699.714570] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Skipping network cache update for instance because it is being deleted. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10307}} [ 1699.740421] env[63024]: DEBUG nova.scheduler.client.report [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1699.796423] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.818152] env[63024]: DEBUG oslo_vmware.api [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Task: {'id': task-1950753, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.383814} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1699.818152] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1699.818152] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1699.818152] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1699.818152] env[63024]: INFO nova.compute.manager [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1699.818340] env[63024]: DEBUG oslo.service.loopingcall [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1699.818340] env[63024]: DEBUG nova.compute.manager [-] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1699.818411] env[63024]: DEBUG nova.network.neutron [-] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1699.864626] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Acquiring lock "cc5cfa6d-d3db-4997-8413-2460e1124f02" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.864944] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Lock "cc5cfa6d-d3db-4997-8413-2460e1124f02" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.865207] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Acquiring lock "cc5cfa6d-d3db-4997-8413-2460e1124f02-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1699.865420] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Lock "cc5cfa6d-d3db-4997-8413-2460e1124f02-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1699.865635] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Lock "cc5cfa6d-d3db-4997-8413-2460e1124f02-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1699.868405] env[63024]: INFO nova.compute.manager [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Terminating instance [ 1700.118461] env[63024]: DEBUG oslo_vmware.api [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950754, 'name': PowerOffVM_Task, 'duration_secs': 0.286511} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.118734] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1700.118905] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1700.119171] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03c173b3-d73c-4db9-bdd4-7c9bac3667c6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.226423] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1700.226735] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1700.227049] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Deleting the datastore file [datastore1] ccd80e20-9fc2-415a-a428-fcf85994c7f8 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1700.227426] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b8208fe-5660-4e3e-a13c-67f28dd7708a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.234531] env[63024]: DEBUG oslo_vmware.api [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for the task: (returnval){ [ 1700.234531] env[63024]: value = "task-1950756" [ 1700.234531] env[63024]: _type = "Task" [ 1700.234531] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.249113] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1700.249837] env[63024]: DEBUG nova.compute.manager [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1700.254553] env[63024]: DEBUG oslo_vmware.api [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950756, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.258124] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.382s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1700.260980] env[63024]: INFO nova.compute.claims [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1700.270247] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1700.270488] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquired lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1700.270727] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Forcefully refreshing network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1700.373645] env[63024]: DEBUG nova.compute.manager [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1700.373902] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1700.374799] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb89d90-96a0-410a-a7d1-0aac8241fdd8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.382402] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1700.382691] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89fa446e-b713-487d-9186-3b2f1a59735b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.389669] env[63024]: DEBUG oslo_vmware.api [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Waiting for the task: (returnval){ [ 1700.389669] env[63024]: value = "task-1950758" [ 1700.389669] env[63024]: _type = "Task" [ 1700.389669] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1700.406524] env[63024]: DEBUG oslo_vmware.api [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950758, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1700.481044] env[63024]: DEBUG nova.network.neutron [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Updating instance_info_cache with network_info: [{"id": "ab9681ef-e7dc-4992-be61-3ef37483b9b8", "address": "fa:16:3e:9b:40:e1", "network": {"id": "05e5c67b-622f-489b-803d-eb380e0adc8c", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-529483603-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3a86fdb7cd0e415d9ec099d327fbdca3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aec0089a-ff85-4bef-bad8-c84de39af71a", "external-id": "nsx-vlan-transportzone-758", "segmentation_id": 758, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab9681ef-e7", "ovs_interfaceid": "ab9681ef-e7dc-4992-be61-3ef37483b9b8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.597825] env[63024]: DEBUG nova.compute.manager [req-f6ccf358-5ece-4552-9a71-6299326d7c52 req-b1b8354f-8aaf-4c82-b463-1e7e9b43de0a service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Received event network-vif-deleted-421d2adb-43a2-41f5-b64d-29989f6a0fa5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1700.597825] env[63024]: INFO nova.compute.manager [req-f6ccf358-5ece-4552-9a71-6299326d7c52 req-b1b8354f-8aaf-4c82-b463-1e7e9b43de0a service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Neutron deleted interface 421d2adb-43a2-41f5-b64d-29989f6a0fa5; detaching it from the instance and deleting it from the info cache [ 1700.597825] env[63024]: DEBUG nova.network.neutron [req-f6ccf358-5ece-4552-9a71-6299326d7c52 req-b1b8354f-8aaf-4c82-b463-1e7e9b43de0a service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1700.744819] env[63024]: DEBUG oslo_vmware.api [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Task: {'id': task-1950756, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.187242} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.747032] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1700.747032] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1700.747032] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1700.747032] env[63024]: INFO nova.compute.manager [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1700.747032] env[63024]: DEBUG oslo.service.loopingcall [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1700.747244] env[63024]: DEBUG nova.compute.manager [-] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1700.747244] env[63024]: DEBUG nova.network.neutron [-] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1700.757414] env[63024]: DEBUG nova.compute.utils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1700.758741] env[63024]: DEBUG nova.compute.manager [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1700.758904] env[63024]: DEBUG nova.network.neutron [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1700.796782] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1700.808558] env[63024]: DEBUG nova.policy [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b8e2c246b2cf49aa95f7b002eab2e45b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7ed27976af940448e1017ee9c572fa9', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1700.899362] env[63024]: DEBUG oslo_vmware.api [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950758, 'name': PowerOffVM_Task, 'duration_secs': 0.18824} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1700.899632] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1700.899799] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1700.900078] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57110877-0298-4a0f-8832-3d846bc475fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.981646] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1700.981646] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1700.981835] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Deleting the datastore file [datastore1] cc5cfa6d-d3db-4997-8413-2460e1124f02 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1700.981947] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ef13a4f1-c0b7-40a5-bee1-2f00aa2f030b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.984765] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Releasing lock "refresh_cache-28b3bfc7-2bed-4941-9f48-8bd301e1a971" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1700.987064] env[63024]: DEBUG nova.compute.manager [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1700.988794] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ca5dc7-62eb-4433-a058-d56ac695976c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1700.993281] env[63024]: DEBUG oslo_vmware.api [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 
tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Waiting for the task: (returnval){ [ 1700.993281] env[63024]: value = "task-1950760" [ 1700.993281] env[63024]: _type = "Task" [ 1700.993281] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1701.013687] env[63024]: DEBUG oslo_vmware.api [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950760, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1701.042663] env[63024]: DEBUG nova.network.neutron [-] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1701.100512] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-94522ee0-92e4-4d56-b61e-a763b9e121c9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.110352] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711d90be-120b-4fa1-b6f7-a867575f7504 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.155962] env[63024]: DEBUG nova.compute.manager [req-f6ccf358-5ece-4552-9a71-6299326d7c52 req-b1b8354f-8aaf-4c82-b463-1e7e9b43de0a service nova] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Detach interface failed, port_id=421d2adb-43a2-41f5-b64d-29989f6a0fa5, reason: Instance 61fdfa06-cb40-44a3-8abc-428b26bd40f5 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1701.174572] env[63024]: DEBUG nova.network.neutron [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Successfully created port: 67b8a4fd-320c-4178-ab08-8bbe9fb878ba {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1701.262191] env[63024]: DEBUG nova.compute.manager [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1701.413556] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1701.481294] env[63024]: DEBUG nova.network.neutron [-] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1701.518945] env[63024]: DEBUG oslo_vmware.api [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Task: {'id': task-1950760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.325234} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1701.518945] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1701.518945] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1701.518945] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1701.519362] env[63024]: INFO nova.compute.manager [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1701.519362] env[63024]: DEBUG oslo.service.loopingcall [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1701.519996] env[63024]: DEBUG nova.compute.manager [-] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1701.520119] env[63024]: DEBUG nova.network.neutron [-] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1701.547432] env[63024]: INFO nova.compute.manager [-] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Took 1.73 seconds to deallocate network for instance. 
[ 1701.832033] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52385d8b-bc2a-4dda-bc5e-614b193157cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.839371] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5565258b-11c9-4d8f-ac41-fb9aa0e4a33b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.870372] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a157c39-5299-483a-b8c2-9809b36a5da6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.879108] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61aab01b-d470-467c-a55f-17b5c6835884 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1701.893264] env[63024]: DEBUG nova.compute.provider_tree [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1701.923428] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Releasing lock "refresh_cache-b629b4f8-f79f-4361-b78c-8705a6888a9e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1701.923636] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Updated the network info_cache for instance {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10329}} [ 1701.923826] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1701.924112] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1701.924280] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1701.924430] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1701.924771] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1701.924771] 
env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1701.924899] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1701.925056] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1701.986617] env[63024]: INFO nova.compute.manager [-] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Took 1.24 seconds to deallocate network for instance. [ 1701.993430] env[63024]: DEBUG oslo_concurrency.lockutils [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Acquiring lock "04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.993658] env[63024]: DEBUG oslo_concurrency.lockutils [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Lock "04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.993857] env[63024]: DEBUG oslo_concurrency.lockutils [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Acquiring lock "04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1701.994050] env[63024]: DEBUG oslo_concurrency.lockutils [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Lock "04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1701.994228] env[63024]: DEBUG oslo_concurrency.lockutils [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Lock "04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1701.998722] env[63024]: INFO nova.compute.manager [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 
04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Terminating instance [ 1702.020336] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4d332a9-4454-461f-8ba2-afcde44af95f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.028616] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Doing hard reboot of VM {{(pid=63024) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1702.028870] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-195afd93-cd23-46e1-8006-d65e7bd155bb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.035513] env[63024]: DEBUG oslo_vmware.api [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for the task: (returnval){ [ 1702.035513] env[63024]: value = "task-1950761" [ 1702.035513] env[63024]: _type = "Task" [ 1702.035513] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.043324] env[63024]: DEBUG oslo_vmware.api [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950761, 'name': ResetVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.054799] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.268235] env[63024]: DEBUG nova.network.neutron [-] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1702.283626] env[63024]: DEBUG nova.compute.manager [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1702.320318] env[63024]: DEBUG nova.virt.hardware [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1702.320596] env[63024]: DEBUG nova.virt.hardware [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1702.320750] env[63024]: DEBUG nova.virt.hardware [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1702.320932] env[63024]: DEBUG nova.virt.hardware [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1702.321124] env[63024]: DEBUG nova.virt.hardware [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1702.321294] env[63024]: DEBUG nova.virt.hardware [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1702.321502] env[63024]: DEBUG nova.virt.hardware [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1702.321662] env[63024]: DEBUG nova.virt.hardware [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1702.321837] env[63024]: DEBUG nova.virt.hardware [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1702.322010] env[63024]: DEBUG nova.virt.hardware [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1702.322191] env[63024]: DEBUG nova.virt.hardware [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1702.323429] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98fc3806-1906-449a-ad19-363a0dcf9d98 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.332127] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4537db0-047c-42c2-a32d-7d3147ce3140 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.396350] env[63024]: DEBUG nova.scheduler.client.report [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1702.427919] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.493993] env[63024]: DEBUG oslo_concurrency.lockutils [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.502805] env[63024]: DEBUG nova.compute.manager [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1702.503071] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1702.504068] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f0377e-74a0-40bf-a251-c7ccab6a7c80 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.513378] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1702.513946] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f8a4a8dd-354c-4a39-8d01-fc5cb0f2c114 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.519945] env[63024]: DEBUG oslo_vmware.api [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Waiting for the task: (returnval){ [ 1702.519945] env[63024]: value = "task-1950763" [ 1702.519945] env[63024]: _type = "Task" [ 1702.519945] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1702.527900] env[63024]: DEBUG oslo_vmware.api [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950763, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1702.546418] env[63024]: DEBUG oslo_vmware.api [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950761, 'name': ResetVM_Task, 'duration_secs': 0.087604} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1702.546686] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Did hard reboot of VM {{(pid=63024) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1702.546852] env[63024]: DEBUG nova.compute.manager [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1702.547886] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe7e4c1f-56bb-479a-bd4c-635e4ad5c288 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1702.629400] env[63024]: DEBUG nova.compute.manager [req-2001182e-b962-4a9b-9649-777aa159d1c6 req-60e545a0-1d5f-431b-a9bd-92dba2371ab6 service nova] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Received event network-vif-deleted-223e2d99-de00-4474-8225-7fee75ac28d6 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1702.629639] env[63024]: DEBUG nova.compute.manager [req-2001182e-b962-4a9b-9649-777aa159d1c6 req-60e545a0-1d5f-431b-a9bd-92dba2371ab6 service nova] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Received event network-vif-deleted-e33e15ee-eb9d-4eaf-8eb7-845fe940d23d {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1702.773924] env[63024]: INFO nova.compute.manager [-] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Took 1.25 seconds to deallocate network for instance. 
[ 1702.830531] env[63024]: DEBUG nova.compute.manager [req-a2b17ce5-026f-49a3-b6e1-7793e720ffd5 req-df3f1987-2047-42ba-8749-aa6f4a6d6625 service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Received event network-vif-plugged-67b8a4fd-320c-4178-ab08-8bbe9fb878ba {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1702.830745] env[63024]: DEBUG oslo_concurrency.lockutils [req-a2b17ce5-026f-49a3-b6e1-7793e720ffd5 req-df3f1987-2047-42ba-8749-aa6f4a6d6625 service nova] Acquiring lock "9679a1a2-b003-4a60-a812-8b3a9b5f545f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1702.830949] env[63024]: DEBUG oslo_concurrency.lockutils [req-a2b17ce5-026f-49a3-b6e1-7793e720ffd5 req-df3f1987-2047-42ba-8749-aa6f4a6d6625 service nova] Lock "9679a1a2-b003-4a60-a812-8b3a9b5f545f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.831333] env[63024]: DEBUG oslo_concurrency.lockutils [req-a2b17ce5-026f-49a3-b6e1-7793e720ffd5 req-df3f1987-2047-42ba-8749-aa6f4a6d6625 service nova] Lock "9679a1a2-b003-4a60-a812-8b3a9b5f545f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.831523] env[63024]: DEBUG nova.compute.manager [req-a2b17ce5-026f-49a3-b6e1-7793e720ffd5 req-df3f1987-2047-42ba-8749-aa6f4a6d6625 service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] No waiting events found dispatching network-vif-plugged-67b8a4fd-320c-4178-ab08-8bbe9fb878ba {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1702.831683] env[63024]: WARNING nova.compute.manager [req-a2b17ce5-026f-49a3-b6e1-7793e720ffd5 req-df3f1987-2047-42ba-8749-aa6f4a6d6625 service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Received unexpected event network-vif-plugged-67b8a4fd-320c-4178-ab08-8bbe9fb878ba for instance with vm_state building and task_state spawning. [ 1702.902392] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.646s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.902936] env[63024]: DEBUG nova.compute.manager [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1702.908100] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.696s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.908320] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.910464] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.462s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.910649] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.912455] env[63024]: DEBUG oslo_concurrency.lockutils [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.387s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.912631] env[63024]: DEBUG oslo_concurrency.lockutils [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1702.914390] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 34.429s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1702.915843] env[63024]: INFO nova.compute.claims [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1702.952492] env[63024]: INFO nova.scheduler.client.report [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf 
tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Deleted allocations for instance b629b4f8-f79f-4361-b78c-8705a6888a9e [ 1702.959088] env[63024]: DEBUG nova.network.neutron [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Successfully updated port: 67b8a4fd-320c-4178-ab08-8bbe9fb878ba {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1702.961692] env[63024]: INFO nova.scheduler.client.report [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Deleted allocations for instance 00e925a1-9b79-46e2-b7f7-c0b63e1e72df [ 1702.972738] env[63024]: INFO nova.scheduler.client.report [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Deleted allocations for instance ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d [ 1703.031762] env[63024]: DEBUG oslo_vmware.api [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950763, 'name': PowerOffVM_Task, 'duration_secs': 0.230456} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.032016] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1703.032198] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1703.032476] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-439a611c-0956-4e76-9d72-68de643fb7e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.059919] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d868255a-d622-4978-bc5e-7d66e6ce5f18 tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.908s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.098261] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1703.098492] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 
tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1703.098675] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Deleting the datastore file [datastore1] 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1703.099366] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2cb4fdc3-63cb-424d-9af5-a295957b684d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1703.110582] env[63024]: DEBUG oslo_vmware.api [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Waiting for the task: (returnval){ [ 1703.110582] env[63024]: value = "task-1950765" [ 1703.110582] env[63024]: _type = "Task" [ 1703.110582] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1703.116084] env[63024]: DEBUG oslo_vmware.api [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950765, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1703.282872] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1703.423810] env[63024]: DEBUG nova.compute.utils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1703.425088] env[63024]: DEBUG nova.compute.manager [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1703.425702] env[63024]: DEBUG nova.network.neutron [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1703.464889] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Acquiring lock "refresh_cache-9679a1a2-b003-4a60-a812-8b3a9b5f545f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1703.464889] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Acquired lock "refresh_cache-9679a1a2-b003-4a60-a812-8b3a9b5f545f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1703.464889] env[63024]: DEBUG nova.network.neutron [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1703.466563] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19d2b9d0-4aa4-47b3-85f8-fc2fcc925cbf tempest-FloatingIPsAssociationTestJSON-1956190197 tempest-FloatingIPsAssociationTestJSON-1956190197-project-member] Lock "b629b4f8-f79f-4361-b78c-8705a6888a9e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.139s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.474673] env[63024]: DEBUG oslo_concurrency.lockutils [None req-677364b9-353a-44ed-b88c-6aba81b1a05a tempest-ServerDiagnosticsTest-1004748477 tempest-ServerDiagnosticsTest-1004748477-project-member] Lock "00e925a1-9b79-46e2-b7f7-c0b63e1e72df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.553s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.477512] env[63024]: DEBUG nova.policy [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ef31aa3582f4b2dab5f9ce2465d5e32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '674f344eaf784662ac922405620a3ac4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1703.482129] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6138a81-3a08-4fc5-99c4-2b2ab88def37 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d" 
"released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.474s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1703.617028] env[63024]: DEBUG oslo_vmware.api [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Task: {'id': task-1950765, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140222} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1703.617268] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1703.617456] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1703.617630] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1703.617797] env[63024]: INFO nova.compute.manager [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1703.618045] env[63024]: DEBUG oslo.service.loopingcall [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1703.618269] env[63024]: DEBUG nova.compute.manager [-] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1703.618364] env[63024]: DEBUG nova.network.neutron [-] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1703.929760] env[63024]: DEBUG nova.compute.manager [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1703.966457] env[63024]: DEBUG nova.network.neutron [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Successfully created port: 52fd1b1a-cd75-4bd3-967c-b98213510df1 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1704.084017] env[63024]: DEBUG nova.network.neutron [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1704.282387] env[63024]: DEBUG oslo_concurrency.lockutils [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "2bfcd5e1-b1d9-4829-bea5-d8c460ceec16" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.282587] env[63024]: DEBUG oslo_concurrency.lockutils [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "2bfcd5e1-b1d9-4829-bea5-d8c460ceec16" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.282845] env[63024]: DEBUG oslo_concurrency.lockutils [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "2bfcd5e1-b1d9-4829-bea5-d8c460ceec16-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.283083] env[63024]: DEBUG oslo_concurrency.lockutils [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "2bfcd5e1-b1d9-4829-bea5-d8c460ceec16-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.284304] env[63024]: DEBUG oslo_concurrency.lockutils [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "2bfcd5e1-b1d9-4829-bea5-d8c460ceec16-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.286797] env[63024]: INFO nova.compute.manager [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Terminating instance [ 1704.442624] env[63024]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f6be508-216c-467b-80bb-5db813df33a3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.452031] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7ffd9e-0158-4c01-a595-2f3a7a7a509e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.489318] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13197231-72cd-44fd-9a61-49703efeb4e1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.493568] env[63024]: DEBUG nova.network.neutron [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Updating instance_info_cache with network_info: [{"id": "67b8a4fd-320c-4178-ab08-8bbe9fb878ba", "address": "fa:16:3e:bb:35:36", "network": {"id": "b4353b4e-cb5e-42f6-979d-a0622ed453a3", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1437328880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7ed27976af940448e1017ee9c572fa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67b8a4fd-32", "ovs_interfaceid": "67b8a4fd-320c-4178-ab08-8bbe9fb878ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1704.503295] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b26c80b0-b310-48bf-a765-760e0cd330b7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.520991] env[63024]: DEBUG nova.compute.provider_tree [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1704.795034] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 
tempest-InstanceActionsTestJSON-392269979-project-member] Acquiring lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.795317] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.795575] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquiring lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1704.795695] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1704.795857] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1704.797792] env[63024]: DEBUG nova.compute.manager [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1704.797988] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1704.798501] env[63024]: INFO nova.compute.manager [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Terminating instance [ 1704.800441] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-782a9a4e-2910-49b7-93fa-28568e8aa5dd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.810866] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1704.810866] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07a1ace7-aa3c-4c59-b84f-5e60efa1000b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.818419] env[63024]: DEBUG oslo_vmware.api [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1704.818419] env[63024]: value = "task-1950767" [ 1704.818419] env[63024]: _type = "Task" [ 1704.818419] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1704.827565] env[63024]: DEBUG oslo_vmware.api [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950767, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1704.874046] env[63024]: DEBUG nova.network.neutron [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Successfully created port: e82533e3-2173-4dc5-911c-829fa32117ad {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1704.940189] env[63024]: DEBUG nova.compute.manager [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1704.984600] env[63024]: DEBUG nova.virt.hardware [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1704.984600] env[63024]: DEBUG nova.virt.hardware [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1704.984600] env[63024]: DEBUG nova.virt.hardware [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1704.984600] env[63024]: DEBUG nova.virt.hardware [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1704.984908] env[63024]: DEBUG nova.virt.hardware [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1704.985626] env[63024]: DEBUG nova.virt.hardware [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1704.986134] env[63024]: DEBUG nova.virt.hardware [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1704.986238] env[63024]: DEBUG nova.virt.hardware [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1704.986495] env[63024]: DEBUG nova.virt.hardware [None 
req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1704.986695] env[63024]: DEBUG nova.virt.hardware [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1704.986976] env[63024]: DEBUG nova.virt.hardware [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1704.988267] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691f3685-7851-47b6-aaea-0e537c6187c7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1704.995833] env[63024]: DEBUG nova.network.neutron [-] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1704.999378] env[63024]: DEBUG nova.compute.manager [req-57d4b773-f9a9-41fb-baab-cc7239e81bd6 req-5483668e-c9e0-4674-8018-657e48d50500 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Received event network-vif-deleted-879d1c91-c785-4da7-852e-abd159810127 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1704.999378] env[63024]: INFO nova.compute.manager [req-57d4b773-f9a9-41fb-baab-cc7239e81bd6 req-5483668e-c9e0-4674-8018-657e48d50500 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Neutron deleted interface 879d1c91-c785-4da7-852e-abd159810127; detaching it from the instance and deleting it from the info cache [ 1704.999378] env[63024]: DEBUG nova.network.neutron [req-57d4b773-f9a9-41fb-baab-cc7239e81bd6 req-5483668e-c9e0-4674-8018-657e48d50500 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1704.999941] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Releasing lock "refresh_cache-9679a1a2-b003-4a60-a812-8b3a9b5f545f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1705.000778] env[63024]: DEBUG nova.compute.manager [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Instance network_info: |[{"id": "67b8a4fd-320c-4178-ab08-8bbe9fb878ba", "address": "fa:16:3e:bb:35:36", "network": {"id": "b4353b4e-cb5e-42f6-979d-a0622ed453a3", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1437328880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7ed27976af940448e1017ee9c572fa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67b8a4fd-32", "ovs_interfaceid": "67b8a4fd-320c-4178-ab08-8bbe9fb878ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1705.003650] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:35:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '93341b73-918c-4e9d-9c66-ca171a54b574', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67b8a4fd-320c-4178-ab08-8bbe9fb878ba', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1705.013733] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Creating folder: Project (c7ed27976af940448e1017ee9c572fa9). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1705.017071] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1d143096-764b-425c-877c-56006db7d030 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.024112] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4d9051-5c41-497f-9f09-4ee857f30789 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.040486] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Created folder: Project (c7ed27976af940448e1017ee9c572fa9) in parent group-v401959. [ 1705.041182] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Creating folder: Instances. Parent ref: group-v402081. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1705.049590] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-467c6f51-e6ec-457b-ace5-d26923fbfe94 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.060264] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Created folder: Instances in parent group-v402081. [ 1705.060264] env[63024]: DEBUG oslo.service.loopingcall [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1705.060709] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1705.060709] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2606c84-1519-49d6-8b28-0d8bb94f82c9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.079466] env[63024]: ERROR nova.scheduler.client.report [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [req-d6fdcb7b-2d20-4260-a960-aeabe1458350] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d6fdcb7b-2d20-4260-a960-aeabe1458350"}]} [ 1705.096640] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1705.096640] env[63024]: value = "task-1950770" [ 1705.096640] env[63024]: _type = "Task" [ 1705.096640] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.107289] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950770, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.111372] env[63024]: DEBUG nova.scheduler.client.report [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1705.130436] env[63024]: DEBUG nova.scheduler.client.report [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1705.130673] env[63024]: DEBUG nova.compute.provider_tree [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1705.139722] env[63024]: DEBUG nova.compute.manager [req-ec52460a-c08e-4dc7-8243-12b9f69f2fdc req-81b15cdd-9d92-4701-ad55-d0d239892bac service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Received event network-changed-67b8a4fd-320c-4178-ab08-8bbe9fb878ba {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1705.139722] env[63024]: DEBUG nova.compute.manager [req-ec52460a-c08e-4dc7-8243-12b9f69f2fdc req-81b15cdd-9d92-4701-ad55-d0d239892bac service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Refreshing instance network info cache due to event network-changed-67b8a4fd-320c-4178-ab08-8bbe9fb878ba. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1705.139722] env[63024]: DEBUG oslo_concurrency.lockutils [req-ec52460a-c08e-4dc7-8243-12b9f69f2fdc req-81b15cdd-9d92-4701-ad55-d0d239892bac service nova] Acquiring lock "refresh_cache-9679a1a2-b003-4a60-a812-8b3a9b5f545f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.139722] env[63024]: DEBUG oslo_concurrency.lockutils [req-ec52460a-c08e-4dc7-8243-12b9f69f2fdc req-81b15cdd-9d92-4701-ad55-d0d239892bac service nova] Acquired lock "refresh_cache-9679a1a2-b003-4a60-a812-8b3a9b5f545f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.139722] env[63024]: DEBUG nova.network.neutron [req-ec52460a-c08e-4dc7-8243-12b9f69f2fdc req-81b15cdd-9d92-4701-ad55-d0d239892bac service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Refreshing network info cache for port 67b8a4fd-320c-4178-ab08-8bbe9fb878ba {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1705.155607] env[63024]: DEBUG nova.scheduler.client.report [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1705.193314] env[63024]: DEBUG nova.scheduler.client.report [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1705.306685] env[63024]: DEBUG nova.compute.manager [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1705.306850] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1705.310199] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27493054-31be-4b66-9440-9c0863b5edd1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.316641] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1705.316950] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b1879d1-23e0-48af-b5ee-9213962f5fb6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.328480] env[63024]: DEBUG oslo_vmware.api [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for the task: (returnval){ [ 1705.328480] env[63024]: value = "task-1950771" [ 1705.328480] env[63024]: _type = "Task" [ 1705.328480] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.333849] env[63024]: DEBUG oslo_vmware.api [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950767, 'name': PowerOffVM_Task, 'duration_secs': 0.366894} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.336653] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1705.336653] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1705.336818] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bd7094f-d1bb-4e1f-8eb3-4c3c071631bc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.350415] env[63024]: DEBUG oslo_vmware.api [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950771, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.433234] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1705.433397] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1705.433599] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Deleting the datastore file [datastore1] 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1705.433892] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0ac4fab7-39c9-4c00-82bf-e1dfffe7a7ca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.442441] env[63024]: DEBUG oslo_vmware.api [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for the task: (returnval){ [ 1705.442441] env[63024]: value = "task-1950773" [ 1705.442441] env[63024]: _type = "Task" [ 1705.442441] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.457412] env[63024]: DEBUG oslo_vmware.api [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950773, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.501904] env[63024]: INFO nova.compute.manager [-] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Took 1.88 seconds to deallocate network for instance. 
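The PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task and CreateVM_Task entries above all follow the same shape: the driver submits a vSphere task, then polls it, logging "progress is N%" until the poll reports "completed successfully". Below is a minimal, self-contained sketch of that polling loop. TaskInfo, fetch_info and the simulated state sequence are stand-ins invented for the example; the real loop lives in oslo_vmware.api.VMwareAPISession.wait_for_task (the wait_for_task / _poll_task frames referenced in the log) and reads task state through the vSphere PropertyCollector rather than a callable.

import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    state: str           # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    result: object = None
    error: str = ""

def wait_for_task(fetch_info, poll_interval=0.5, timeout=300.0):
    """Poll a task until it reaches a terminal state.

    fetch_info is a zero-argument callable returning the current TaskInfo;
    in the real driver this would be a PropertyCollector read over the
    vSphere API, abstracted away here so only the loop is visible.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_info()
        if info.state == "success":
            return info.result
        if info.state == "error":
            raise RuntimeError("task failed: %s" % info.error)
        # 'queued' or 'running': report progress and keep polling,
        # mirroring the "progress is N%" DEBUG lines above.
        print("progress is %d%%" % info.progress)
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete within %.0fs" % timeout)

# Tiny simulation so the sketch runs end to end.
_states = iter([TaskInfo("running", 0),
                TaskInfo("running", 50),
                TaskInfo("success", 100, result="ok")])
print(wait_for_task(lambda: next(_states), poll_interval=0.01))

In the real implementation the poll is driven by oslo.service's looping-call machinery rather than an inline sleep as in this sketch, which is why completed tasks in the log report a 'duration_secs' measured by the wrapper.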
[ 1705.504748] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c7ee1b0-e26c-42d2-8cd7-e9de0b770d79 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.524142] env[63024]: DEBUG nova.network.neutron [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Successfully created port: 7f7d9f32-5f3e-4ba0-afc0-270579e87766 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1705.534321] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee2b97d-8a88-4121-b786-dd7a0211d533 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.575053] env[63024]: DEBUG nova.compute.manager [req-57d4b773-f9a9-41fb-baab-cc7239e81bd6 req-5483668e-c9e0-4674-8018-657e48d50500 service nova] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Detach interface failed, port_id=879d1c91-c785-4da7-852e-abd159810127, reason: Instance 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1705.609819] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950770, 'name': CreateVM_Task, 'duration_secs': 0.432153} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.613456] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1705.614916] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1705.616187] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1705.616187] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1705.616187] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49ff5d50-9690-463a-951b-bb928ea8164e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.621680] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 
tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Waiting for the task: (returnval){ [ 1705.621680] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52129c0b-5383-d01f-ad25-ef62924293b0" [ 1705.621680] env[63024]: _type = "Task" [ 1705.621680] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.636629] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52129c0b-5383-d01f-ad25-ef62924293b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.743284] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6baad8-5ee7-4240-b5c8-fea69b59bbe6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.752655] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b8e0fe8-1cd7-4b50-9c78-af0f299ba365 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.790593] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01afaad6-10b6-47de-b796-2a5546e23c6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.798667] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed6d97d-ce73-47fd-9c7f-e1e4bb94ac9b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.814084] env[63024]: DEBUG nova.compute.provider_tree [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1705.842322] env[63024]: DEBUG oslo_vmware.api [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950771, 'name': PowerOffVM_Task, 'duration_secs': 0.174223} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.842813] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1705.842813] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1705.842970] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66ab9d6d-e7b1-41eb-a8d5-6018b35939a8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.916788] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1705.917101] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1705.917476] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Deleting the datastore file [datastore1] 28b3bfc7-2bed-4941-9f48-8bd301e1a971 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1705.917809] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29470f54-1e63-4152-8640-960159f6c1a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1705.924286] env[63024]: DEBUG oslo_vmware.api [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for the task: (returnval){ [ 1705.924286] env[63024]: value = "task-1950775" [ 1705.924286] env[63024]: _type = "Task" [ 1705.924286] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1705.937288] env[63024]: DEBUG oslo_vmware.api [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950775, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1705.954692] env[63024]: DEBUG oslo_vmware.api [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Task: {'id': task-1950773, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178892} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1705.954993] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1705.955241] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1705.955769] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1705.956055] env[63024]: INFO nova.compute.manager [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1705.956507] env[63024]: DEBUG oslo.service.loopingcall [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1705.959847] env[63024]: DEBUG nova.compute.manager [-] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1705.959847] env[63024]: DEBUG nova.network.neutron [-] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1706.020568] env[63024]: DEBUG oslo_concurrency.lockutils [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.133737] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52129c0b-5383-d01f-ad25-ef62924293b0, 'name': SearchDatastore_Task, 'duration_secs': 0.008373} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.134046] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.134280] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1706.134504] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1706.134642] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1706.134806] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 
1706.135073] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5aad2db5-1fca-4387-adfe-e17b50d6c9cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.149554] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1706.150220] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1706.150972] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f14a84c7-8ed0-47a4-b1e1-2180415c9f41 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.157728] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Waiting for the task: (returnval){ [ 1706.157728] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b73cd9-4096-7bd6-05e4-17fdc6057285" [ 1706.157728] env[63024]: _type = "Task" [ 1706.157728] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.165866] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b73cd9-4096-7bd6-05e4-17fdc6057285, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.189244] env[63024]: DEBUG nova.network.neutron [req-ec52460a-c08e-4dc7-8243-12b9f69f2fdc req-81b15cdd-9d92-4701-ad55-d0d239892bac service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Updated VIF entry in instance network info cache for port 67b8a4fd-320c-4178-ab08-8bbe9fb878ba. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1706.189612] env[63024]: DEBUG nova.network.neutron [req-ec52460a-c08e-4dc7-8243-12b9f69f2fdc req-81b15cdd-9d92-4701-ad55-d0d239892bac service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Updating instance_info_cache with network_info: [{"id": "67b8a4fd-320c-4178-ab08-8bbe9fb878ba", "address": "fa:16:3e:bb:35:36", "network": {"id": "b4353b4e-cb5e-42f6-979d-a0622ed453a3", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1437328880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7ed27976af940448e1017ee9c572fa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67b8a4fd-32", "ovs_interfaceid": "67b8a4fd-320c-4178-ab08-8bbe9fb878ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.364245] env[63024]: DEBUG nova.scheduler.client.report [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 65 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1706.364498] env[63024]: DEBUG nova.compute.provider_tree [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 65 to 66 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1706.364827] env[63024]: DEBUG nova.compute.provider_tree [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} 
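The SearchDatastore_Task records above illustrate oslo.vmware's task-handling pattern: a SOAP call returns a Task managed-object reference, wait_for_task logs "Waiting for the task ... to complete", _poll_task reports "progress is N%", and the call returns once vCenter marks the task successful. A minimal sketch of that pattern follows; the endpoint, credentials, datastore-browser reference and search spec are hypothetical placeholders, not values taken from this log, and this is not Nova's actual code.

    from oslo_vmware import api


    def search_image_cache(session, ds_browser, search_spec):
        """Run SearchDatastore_Task against the image cache and wait for it."""
        # invoke_api() issues the SOAP call and returns a Task moref; this is
        # where the "Waiting for the task ... to complete" record is emitted.
        task = session.invoke_api(
            session.vim, 'SearchDatastore_Task', ds_browser,
            datastorePath='[datastore1] devstack-image-cache_base',
            searchSpec=search_spec)
        # wait_for_task() polls the task (the "progress is N%" records) and
        # returns its TaskInfo on success, raising on error or cancellation.
        return session.wait_for_task(task)


    # Placeholder endpoint and credentials (the real values come from nova.conf
    # [vmware]); constructing the session performs the SessionManager.Login seen
    # when the service starts.
    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)
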
[ 1706.435985] env[63024]: DEBUG oslo_vmware.api [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950775, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.669354] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b73cd9-4096-7bd6-05e4-17fdc6057285, 'name': SearchDatastore_Task, 'duration_secs': 0.017934} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.670457] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f286c1d-ff2c-4daf-9e74-4d88635e876d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1706.681863] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Waiting for the task: (returnval){ [ 1706.681863] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5297053b-d84e-fadf-532e-bf2cb09ceeb7" [ 1706.681863] env[63024]: _type = "Task" [ 1706.681863] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1706.690703] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5297053b-d84e-fadf-532e-bf2cb09ceeb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1706.693116] env[63024]: DEBUG oslo_concurrency.lockutils [req-ec52460a-c08e-4dc7-8243-12b9f69f2fdc req-81b15cdd-9d92-4701-ad55-d0d239892bac service nova] Releasing lock "refresh_cache-9679a1a2-b003-4a60-a812-8b3a9b5f545f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1706.871163] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.957s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.872527] env[63024]: DEBUG nova.compute.manager [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1706.874469] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 36.625s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.876093] env[63024]: INFO nova.compute.claims [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1706.897829] env[63024]: DEBUG nova.network.neutron [-] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1706.910112] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Acquiring lock "7146277f-2621-4e8f-a14c-49bf4dd052db" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.910409] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Lock "7146277f-2621-4e8f-a14c-49bf4dd052db" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.910666] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Acquiring lock "7146277f-2621-4e8f-a14c-49bf4dd052db-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1706.910801] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Lock "7146277f-2621-4e8f-a14c-49bf4dd052db-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1706.910963] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Lock "7146277f-2621-4e8f-a14c-49bf4dd052db-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1706.917040] env[63024]: INFO nova.compute.manager [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 
tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Terminating instance [ 1706.942018] env[63024]: DEBUG oslo_vmware.api [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Task: {'id': task-1950775, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.547815} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1706.942018] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1706.942018] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1706.942018] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1706.942018] env[63024]: INFO nova.compute.manager [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1706.942263] env[63024]: DEBUG oslo.service.loopingcall [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1706.942263] env[63024]: DEBUG nova.compute.manager [-] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1706.942263] env[63024]: DEBUG nova.network.neutron [-] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1707.038859] env[63024]: DEBUG nova.compute.manager [req-39a28a87-a1e5-4dad-9cbc-b7af236c0b97 req-b3bf6d21-c45f-44eb-ab4f-cfb94e0eb988 service nova] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Received event network-vif-deleted-468acd69-27ef-4644-8085-504eba6c7955 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1707.196602] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5297053b-d84e-fadf-532e-bf2cb09ceeb7, 'name': SearchDatastore_Task, 'duration_secs': 0.009164} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.197163] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1707.197512] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9679a1a2-b003-4a60-a812-8b3a9b5f545f/9679a1a2-b003-4a60-a812-8b3a9b5f545f.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1707.197882] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ff079c5-4ef6-4f05-9958-64ce048a7614 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.207918] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Waiting for the task: (returnval){ [ 1707.207918] env[63024]: value = "task-1950777" [ 1707.207918] env[63024]: _type = "Task" [ 1707.207918] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.220927] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950777, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.383878] env[63024]: DEBUG nova.compute.utils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1707.389202] env[63024]: DEBUG nova.compute.manager [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1707.389202] env[63024]: DEBUG nova.network.neutron [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1707.399938] env[63024]: INFO nova.compute.manager [-] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Took 1.44 seconds to deallocate network for instance. 
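Two kinds of lock records appear above: the per-image lock on the cached VMDK under [datastore1] devstack-image-cache_base, released once the disk copy result is processed, and the "compute_resources" lock the resource tracker holds for several seconds. Both come from oslo.concurrency's lockutils; the sketch below shows the two usage forms that produce those records, with hypothetical function bodies (it is not Nova's actual code).

    from oslo_concurrency import lockutils


    def fetch_cached_image(vmdk_path):
        # Context-manager form: logs 'Acquiring lock ...' / 'Acquired lock ...'
        # on entry and 'Releasing lock ...' on exit, as seen around the
        # devstack-image-cache_base VMDK path in the records above.
        with lockutils.lock(vmdk_path):
            pass  # copy or verify the cached image while holding the lock


    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Decorator form: logs which function acquired the lock, how long it
        # waited and how long it was held (the ':: waited N.NNNs' and
        # ':: held N.NNNs' records).
        pass
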
[ 1707.425877] env[63024]: DEBUG nova.compute.manager [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1707.426112] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1707.427147] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e346374-3c51-423e-b110-dd6a3793bb1e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.435569] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1707.435900] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95474160-db45-4833-afb3-15ffb7159c01 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.447045] env[63024]: DEBUG oslo_vmware.api [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Waiting for the task: (returnval){ [ 1707.447045] env[63024]: value = "task-1950778" [ 1707.447045] env[63024]: _type = "Task" [ 1707.447045] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1707.459979] env[63024]: DEBUG oslo_vmware.api [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950778, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.486219] env[63024]: DEBUG nova.policy [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '868b92472ee24327a55c68efce691ba9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8a5452991c0c433d987f52bad5c89d22', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1707.720658] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950777, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1707.851495] env[63024]: DEBUG nova.network.neutron [-] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1707.890017] env[63024]: DEBUG nova.compute.manager [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1707.911087] env[63024]: DEBUG oslo_concurrency.lockutils [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1707.965294] env[63024]: DEBUG oslo_vmware.api [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950778, 'name': PowerOffVM_Task, 'duration_secs': 0.419644} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1707.966387] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1707.966387] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1707.966387] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f84dfa8c-1224-48af-9c37-1206d6116d0c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1707.977698] env[63024]: DEBUG nova.network.neutron [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Successfully created port: 241606ef-afe1-4ca8-912c-dae7639e4941 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1708.074618] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1708.074910] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Deleting contents of the VM from 
datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1708.074910] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Deleting the datastore file [datastore1] 7146277f-2621-4e8f-a14c-49bf4dd052db {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1708.075231] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8cce87fc-a0e9-4431-84a1-f303253120fd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.086029] env[63024]: DEBUG oslo_vmware.api [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Waiting for the task: (returnval){ [ 1708.086029] env[63024]: value = "task-1950780" [ 1708.086029] env[63024]: _type = "Task" [ 1708.086029] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.102552] env[63024]: DEBUG oslo_vmware.api [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950780, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.225998] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950777, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.518407} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.225998] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9679a1a2-b003-4a60-a812-8b3a9b5f545f/9679a1a2-b003-4a60-a812-8b3a9b5f545f.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1708.225998] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1708.225998] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90c0ed14-61c4-40c7-85d0-e49502f9487a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.237569] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Waiting for the task: (returnval){ [ 1708.237569] env[63024]: value = "task-1950781" [ 1708.237569] env[63024]: _type = "Task" [ 1708.237569] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.251183] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950781, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.306470] env[63024]: DEBUG nova.compute.manager [req-e871a5f2-ed6d-477f-beea-c9c0d376f3c6 req-077e18a9-38d5-4da8-953c-383432372351 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Received event network-vif-plugged-52fd1b1a-cd75-4bd3-967c-b98213510df1 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1708.306470] env[63024]: DEBUG oslo_concurrency.lockutils [req-e871a5f2-ed6d-477f-beea-c9c0d376f3c6 req-077e18a9-38d5-4da8-953c-383432372351 service nova] Acquiring lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.306470] env[63024]: DEBUG oslo_concurrency.lockutils [req-e871a5f2-ed6d-477f-beea-c9c0d376f3c6 req-077e18a9-38d5-4da8-953c-383432372351 service nova] Lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1708.306470] env[63024]: DEBUG oslo_concurrency.lockutils [req-e871a5f2-ed6d-477f-beea-c9c0d376f3c6 req-077e18a9-38d5-4da8-953c-383432372351 service nova] Lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1708.306470] env[63024]: DEBUG nova.compute.manager [req-e871a5f2-ed6d-477f-beea-c9c0d376f3c6 req-077e18a9-38d5-4da8-953c-383432372351 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] No waiting events found dispatching network-vif-plugged-52fd1b1a-cd75-4bd3-967c-b98213510df1 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1708.306711] env[63024]: WARNING nova.compute.manager [req-e871a5f2-ed6d-477f-beea-c9c0d376f3c6 req-077e18a9-38d5-4da8-953c-383432372351 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Received unexpected event network-vif-plugged-52fd1b1a-cd75-4bd3-967c-b98213510df1 for instance with vm_state building and task_state spawning. [ 1708.312785] env[63024]: DEBUG nova.network.neutron [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Successfully updated port: 52fd1b1a-cd75-4bd3-967c-b98213510df1 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1708.358773] env[63024]: INFO nova.compute.manager [-] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Took 1.42 seconds to deallocate network for instance. 
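The "Waiting for function ... _deallocate_network_with_retries to return" records and the "Took N seconds to deallocate network for instance" lines above come from a retry loop driven by oslo.service's looping-call helpers. The exact variant the compute manager uses is not visible in this log; the sketch below uses FixedIntervalLoopingCall only to show the general mechanism, with a stubbed deallocation function and the instance UUID taken from the record above.

    from oslo_service import loopingcall


    def _deallocate_network(instance_uuid):
        """Hypothetical stand-in for the real Neutron deallocation call."""
        return True


    def deallocate_with_retries(instance_uuid, max_attempts=3):
        attempts = {'count': 0}

        def _try_once():
            attempts['count'] += 1
            done = _deallocate_network(instance_uuid)
            if done or attempts['count'] >= max_attempts:
                # Raising LoopingCallDone stops the loop; start().wait() below
                # then returns to the caller.
                raise loopingcall.LoopingCallDone()

        timer = loopingcall.FixedIntervalLoopingCall(_try_once)
        # Run _try_once every 2 seconds until it raises LoopingCallDone.
        timer.start(interval=2).wait()


    deallocate_with_retries('28b3bfc7-2bed-4941-9f48-8bd301e1a971')
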
[ 1708.517034] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-516fc259-fdba-4fcc-ab50-2789bd25e65c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.526909] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4ad038-ceac-497e-82be-86acaebb35fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.563455] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81e23d3d-766f-4875-a7e2-3f87a9f7102c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.572342] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fde496f-e32f-4372-9f0a-fbe13b5a396f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.587526] env[63024]: DEBUG nova.compute.provider_tree [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1708.600569] env[63024]: DEBUG oslo_vmware.api [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950780, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.748894] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950781, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.160726} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1708.750802] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1708.751768] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a859e94d-cffe-4467-885e-14aae717ff9a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.776728] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 9679a1a2-b003-4a60-a812-8b3a9b5f545f/9679a1a2-b003-4a60-a812-8b3a9b5f545f.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1708.777612] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e381307-502b-4516-af2c-32d4b09daa52 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.800065] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Waiting for the task: (returnval){ [ 1708.800065] env[63024]: value = "task-1950783" [ 1708.800065] env[63024]: _type = "Task" [ 1708.800065] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1708.809941] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950783, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1708.867280] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1708.906977] env[63024]: DEBUG nova.compute.manager [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1708.941761] env[63024]: DEBUG nova.virt.hardware [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T11:08:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='270e2a66-632e-41fa-bb7b-06506c9e6093',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-717992172',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1708.942025] env[63024]: DEBUG nova.virt.hardware [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1708.942220] env[63024]: DEBUG nova.virt.hardware [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1708.942401] env[63024]: DEBUG nova.virt.hardware [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1708.942544] env[63024]: DEBUG nova.virt.hardware [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1708.942687] env[63024]: DEBUG nova.virt.hardware [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1708.943205] env[63024]: DEBUG nova.virt.hardware [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1708.943205] env[63024]: DEBUG nova.virt.hardware [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1708.943474] env[63024]: DEBUG 
nova.virt.hardware [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1708.943561] env[63024]: DEBUG nova.virt.hardware [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1708.943732] env[63024]: DEBUG nova.virt.hardware [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1708.944634] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57762fe1-fc40-4513-96c9-62e6e4f8ce72 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1708.953775] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb3ecb63-0152-4555-8e57-b64fb5454176 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.094447] env[63024]: DEBUG nova.scheduler.client.report [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1709.101846] env[63024]: DEBUG oslo_vmware.api [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Task: {'id': task-1950780, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.667357} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.102531] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1709.102732] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1709.102912] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1709.103097] env[63024]: INFO nova.compute.manager [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Took 1.68 seconds to destroy the instance on the hypervisor. [ 1709.103508] env[63024]: DEBUG oslo.service.loopingcall [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1709.103714] env[63024]: DEBUG nova.compute.manager [-] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1709.103811] env[63024]: DEBUG nova.network.neutron [-] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1709.311030] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950783, 'name': ReconfigVM_Task, 'duration_secs': 0.407563} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.311219] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 9679a1a2-b003-4a60-a812-8b3a9b5f545f/9679a1a2-b003-4a60-a812-8b3a9b5f545f.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1709.311871] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9062f9ba-c5ce-4938-8f55-c907252a43cc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.319594] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Waiting for the task: (returnval){ [ 1709.319594] env[63024]: value = "task-1950784" [ 1709.319594] env[63024]: _type = "Task" [ 1709.319594] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.328411] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950784, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.605988] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.729s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1709.605988] env[63024]: DEBUG nova.compute.manager [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1709.607584] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.263s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1709.607985] env[63024]: DEBUG nova.objects.instance [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 610dd030-5080-498a-8744-b1411297d70d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63024) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1709.754382] env[63024]: DEBUG nova.network.neutron [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Successfully updated port: 241606ef-afe1-4ca8-912c-dae7639e4941 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1709.830629] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950784, 'name': Rename_Task, 'duration_secs': 0.152394} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1709.830970] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1709.831303] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb45a9a7-550d-4786-b3c2-f0ee31c7b8c8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1709.841753] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Waiting for the task: (returnval){ [ 1709.841753] env[63024]: value = "task-1950785" [ 1709.841753] env[63024]: _type = "Task" [ 1709.841753] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1709.851882] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950785, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1709.925128] env[63024]: DEBUG nova.compute.manager [req-cf697df4-2c74-45ac-88df-a6575935d731 req-8e3f3ad1-c02f-46aa-9ca2-f273a749e14c service nova] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Received event network-vif-deleted-ab9681ef-e7dc-4992-be61-3ef37483b9b8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1710.113575] env[63024]: DEBUG nova.compute.utils [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1710.117533] env[63024]: DEBUG nova.compute.manager [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1710.117704] env[63024]: DEBUG nova.network.neutron [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1710.141489] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Acquiring lock "5c2efe96-4ac4-4693-9203-43407d768f66" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1710.141724] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Lock "5c2efe96-4ac4-4693-9203-43407d768f66" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.159943] env[63024]: DEBUG nova.policy [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e1fbd9f78c4949308299f340ea6f0e4e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad3f57a3e2224175812d7816ea5327fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1710.258770] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1710.258958] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1710.259198] env[63024]: DEBUG nova.network.neutron [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1710.355253] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950785, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1710.517852] env[63024]: DEBUG nova.network.neutron [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Successfully created port: 2edee58c-e76b-46ad-b4c8-3b2a70467c01 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1710.621608] env[63024]: DEBUG nova.compute.manager [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1710.633297] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f581567-c01b-4f6a-a8b0-69cfcb1761c2 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1710.633297] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.379s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1710.633297] env[63024]: INFO nova.compute.claims [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1710.814280] env[63024]: DEBUG nova.network.neutron [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1710.852977] env[63024]: DEBUG oslo_vmware.api [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950785, 'name': PowerOnVM_Task, 'duration_secs': 0.672115} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1710.853351] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1710.853572] env[63024]: INFO nova.compute.manager [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Took 8.57 seconds to spawn the instance on the hypervisor. [ 1710.853758] env[63024]: DEBUG nova.compute.manager [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1710.854523] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6adda96-aaa8-4257-b314-ba47fccb30f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.084756] env[63024]: DEBUG nova.compute.manager [req-6cfd33d8-aeb4-4538-8545-54e81ba5d6c4 req-b0e78982-4d55-48b4-9409-a5e580586c1b service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Received event network-changed-52fd1b1a-cd75-4bd3-967c-b98213510df1 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1711.084959] env[63024]: DEBUG nova.compute.manager [req-6cfd33d8-aeb4-4538-8545-54e81ba5d6c4 req-b0e78982-4d55-48b4-9409-a5e580586c1b service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Refreshing instance network info cache due to event network-changed-52fd1b1a-cd75-4bd3-967c-b98213510df1. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1711.085194] env[63024]: DEBUG oslo_concurrency.lockutils [req-6cfd33d8-aeb4-4538-8545-54e81ba5d6c4 req-b0e78982-4d55-48b4-9409-a5e580586c1b service nova] Acquiring lock "refresh_cache-f6fddc23-ad36-4d6f-82a2-ded456b2596e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1711.085540] env[63024]: DEBUG oslo_concurrency.lockutils [req-6cfd33d8-aeb4-4538-8545-54e81ba5d6c4 req-b0e78982-4d55-48b4-9409-a5e580586c1b service nova] Acquired lock "refresh_cache-f6fddc23-ad36-4d6f-82a2-ded456b2596e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1711.085540] env[63024]: DEBUG nova.network.neutron [req-6cfd33d8-aeb4-4538-8545-54e81ba5d6c4 req-b0e78982-4d55-48b4-9409-a5e580586c1b service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Refreshing network info cache for port 52fd1b1a-cd75-4bd3-967c-b98213510df1 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1711.133633] env[63024]: INFO nova.virt.block_device [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Booting with volume 1e878e2f-2053-4c16-ad0c-263307073b4e at /dev/sda [ 1711.137560] env[63024]: DEBUG nova.network.neutron [-] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1711.177611] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f45170e6-7e29-4a24-9c7e-964326d3cf78 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.188636] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eb784a2-370a-45ab-99bc-81ec851dd747 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.200721] env[63024]: DEBUG nova.network.neutron [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance_info_cache with network_info: [{"id": "241606ef-afe1-4ca8-912c-dae7639e4941", "address": "fa:16:3e:fc:f3:01", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241606ef-af", "ovs_interfaceid": "241606ef-afe1-4ca8-912c-dae7639e4941", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1711.236407] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e44236d5-aeca-4876-944b-42e88da8455a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.251975] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bea2d69-5053-4259-881a-974585570758 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.293309] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f388b68e-e01f-430c-939f-393013249096 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.302864] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ca33f38-eeaa-43c5-8956-8d8ff8315f38 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.322536] env[63024]: DEBUG nova.virt.block_device [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Updating existing volume attachment record: 646d405f-a452-416e-b3d6-df8ff624964b {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1711.387779] env[63024]: INFO nova.compute.manager [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Took 53.75 seconds to build instance. [ 1711.595684] env[63024]: DEBUG nova.network.neutron [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Successfully updated port: e82533e3-2173-4dc5-911c-829fa32117ad {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1711.644732] env[63024]: DEBUG nova.network.neutron [req-6cfd33d8-aeb4-4538-8545-54e81ba5d6c4 req-b0e78982-4d55-48b4-9409-a5e580586c1b service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1711.652184] env[63024]: INFO nova.compute.manager [-] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Took 2.55 seconds to deallocate network for instance. 
[ 1711.708334] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1711.708517] env[63024]: DEBUG nova.compute.manager [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Instance network_info: |[{"id": "241606ef-afe1-4ca8-912c-dae7639e4941", "address": "fa:16:3e:fc:f3:01", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241606ef-af", "ovs_interfaceid": "241606ef-afe1-4ca8-912c-dae7639e4941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1711.709195] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:f3:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '241606ef-afe1-4ca8-912c-dae7639e4941', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1711.716820] env[63024]: DEBUG oslo.service.loopingcall [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1711.719509] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1711.720582] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fae606dc-551c-42e5-b576-a35693cb70a8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1711.746307] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1711.746307] env[63024]: value = "task-1950787" [ 1711.746307] env[63024]: _type = "Task" [ 1711.746307] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1711.758495] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950787, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1711.825632] env[63024]: DEBUG nova.network.neutron [req-6cfd33d8-aeb4-4538-8545-54e81ba5d6c4 req-b0e78982-4d55-48b4-9409-a5e580586c1b service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1711.891803] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d6d5341-aa95-4a15-9f34-fbf7ce369240 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Lock "9679a1a2-b003-4a60-a812-8b3a9b5f545f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.735s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.111021] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Acquiring lock "8a826350-0fee-409d-a3fc-260d7d43bdf6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.111278] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Lock "8a826350-0fee-409d-a3fc-260d7d43bdf6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.111474] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Acquiring lock "8a826350-0fee-409d-a3fc-260d7d43bdf6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.111655] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Lock "8a826350-0fee-409d-a3fc-260d7d43bdf6-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.111810] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Lock "8a826350-0fee-409d-a3fc-260d7d43bdf6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.114742] env[63024]: INFO nova.compute.manager [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Terminating instance [ 1712.136890] env[63024]: DEBUG nova.network.neutron [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Successfully updated port: 2edee58c-e76b-46ad-b4c8-3b2a70467c01 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1712.165271] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.203036] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-473476d6-53a7-4789-b8d4-d468171b2bb4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.211751] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d036721-89d8-440d-abd6-9c2688fc3439 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.255567] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641104c2-6ca5-4fbc-a5d0-4896e48ceaba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.269080] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5c9360-aa30-43cc-9b88-913657469085 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.273746] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950787, 'name': CreateVM_Task, 'duration_secs': 0.354225} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.273746] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1712.274736] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1712.274932] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.275759] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1712.276075] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0f8768b-8c02-4880-aa73-085a17861676 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.290151] env[63024]: DEBUG nova.compute.provider_tree [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1712.297130] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1712.297130] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529682b7-5b70-9433-2a13-3f60b590cb79" [ 1712.297130] env[63024]: _type = "Task" [ 1712.297130] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.308633] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529682b7-5b70-9433-2a13-3f60b590cb79, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.308928] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1712.309182] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1712.309408] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1712.309599] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.309725] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1712.309996] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6196ca04-daee-4549-bbeb-47c0c8c0b707 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.319525] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1712.319824] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1712.320612] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39f26344-eec6-459a-b4cc-bcf1f3198ef0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.327536] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1712.327536] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524e2e2b-e00a-5e32-c38e-a8d0512078a4" [ 1712.327536] env[63024]: _type = "Task" [ 1712.327536] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.331016] env[63024]: DEBUG oslo_concurrency.lockutils [req-6cfd33d8-aeb4-4538-8545-54e81ba5d6c4 req-b0e78982-4d55-48b4-9409-a5e580586c1b service nova] Releasing lock "refresh_cache-f6fddc23-ad36-4d6f-82a2-ded456b2596e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1712.339485] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524e2e2b-e00a-5e32-c38e-a8d0512078a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.396850] env[63024]: DEBUG nova.compute.manager [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1712.618182] env[63024]: DEBUG nova.compute.manager [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1712.618482] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1712.619488] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6fb84b-a33b-49af-9029-6023f58da468 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.628360] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1712.628615] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b808d8fc-a44c-4781-bb99-730ec4f3f441 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.636905] env[63024]: DEBUG oslo_vmware.api [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Waiting for the task: (returnval){ [ 1712.636905] env[63024]: value = "task-1950788" [ 1712.636905] env[63024]: _type = "Task" [ 1712.636905] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.641131] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Acquiring lock "refresh_cache-49eb6292-012a-4296-aff8-9c460866a602" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1712.641131] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Acquired lock "refresh_cache-49eb6292-012a-4296-aff8-9c460866a602" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.641350] env[63024]: DEBUG nova.network.neutron [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1712.649477] env[63024]: DEBUG oslo_vmware.api [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950788, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.773999] env[63024]: DEBUG nova.compute.manager [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Received event network-vif-plugged-241606ef-afe1-4ca8-912c-dae7639e4941 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1712.773999] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] Acquiring lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1712.774332] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] Lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1712.774518] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] Lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1712.774754] env[63024]: DEBUG nova.compute.manager [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] No waiting events found dispatching network-vif-plugged-241606ef-afe1-4ca8-912c-dae7639e4941 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1712.774969] env[63024]: WARNING nova.compute.manager [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Received unexpected event network-vif-plugged-241606ef-afe1-4ca8-912c-dae7639e4941 for instance with vm_state building and task_state spawning. [ 1712.775168] env[63024]: DEBUG nova.compute.manager [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Received event network-changed-241606ef-afe1-4ca8-912c-dae7639e4941 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1712.775387] env[63024]: DEBUG nova.compute.manager [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Refreshing instance network info cache due to event network-changed-241606ef-afe1-4ca8-912c-dae7639e4941. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1712.775592] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] Acquiring lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1712.775738] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] Acquired lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1712.775899] env[63024]: DEBUG nova.network.neutron [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Refreshing network info cache for port 241606ef-afe1-4ca8-912c-dae7639e4941 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1712.794157] env[63024]: DEBUG nova.scheduler.client.report [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1712.841520] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524e2e2b-e00a-5e32-c38e-a8d0512078a4, 'name': SearchDatastore_Task, 'duration_secs': 0.009689} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1712.842468] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-555d9c56-6cce-45d7-8b5b-4f0e34bfe232 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1712.849956] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1712.849956] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e7b483-3691-2f92-3f95-9a0f887c4e8d" [ 1712.849956] env[63024]: _type = "Task" [ 1712.849956] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1712.858796] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e7b483-3691-2f92-3f95-9a0f887c4e8d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1712.920030] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1713.150429] env[63024]: DEBUG oslo_vmware.api [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950788, 'name': PowerOffVM_Task, 'duration_secs': 0.188382} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.151565] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1713.151913] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1713.153063] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-912f228f-d758-48fe-aa9b-825f24f9353e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.192578] env[63024]: DEBUG nova.network.neutron [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1713.301223] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.671s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.301223] env[63024]: DEBUG nova.compute.manager [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1713.307542] env[63024]: DEBUG oslo_concurrency.lockutils [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.029s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.307943] env[63024]: DEBUG oslo_concurrency.lockutils [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.310413] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.700s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1713.314189] env[63024]: INFO nova.compute.claims [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1713.356495] env[63024]: INFO nova.scheduler.client.report [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted allocations for instance 610dd030-5080-498a-8744-b1411297d70d [ 1713.369915] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e7b483-3691-2f92-3f95-9a0f887c4e8d, 'name': SearchDatastore_Task, 'duration_secs': 0.009984} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1713.370446] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1713.372016] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 650a97b9-911e-44b0-9e82-a6d4cc95c9dd/650a97b9-911e-44b0-9e82-a6d4cc95c9dd.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1713.372016] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1860a29-c7d7-4ce9-b250-9ef4adfba474 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.382018] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1713.382018] env[63024]: value = "task-1950790" [ 1713.382018] env[63024]: _type = "Task" [ 1713.382018] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.391450] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950790, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.446830] env[63024]: DEBUG nova.compute.manager [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1713.447681] env[63024]: DEBUG nova.virt.hardware [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1713.448171] env[63024]: DEBUG nova.virt.hardware [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1713.448997] env[63024]: DEBUG nova.virt.hardware [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1713.448997] env[63024]: DEBUG nova.virt.hardware [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1713.453024] env[63024]: DEBUG nova.virt.hardware [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1713.453024] env[63024]: DEBUG nova.virt.hardware [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1713.453024] env[63024]: DEBUG nova.virt.hardware [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1713.453024] env[63024]: DEBUG nova.virt.hardware [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1713.453024] env[63024]: DEBUG nova.virt.hardware [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 
tempest-ServerActionsV293TestJSON-1451067865-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1713.453254] env[63024]: DEBUG nova.virt.hardware [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1713.453254] env[63024]: DEBUG nova.virt.hardware [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1713.453254] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-168f3fab-aa0b-4c9a-a28e-756aef7312e5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.461933] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ccda5c-96c1-41fe-a6c6-85d17f5dffe2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.503290] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1713.503290] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1713.503290] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Deleting the datastore file [datastore1] 8a826350-0fee-409d-a3fc-260d7d43bdf6 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1713.503290] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f6284d2e-3f2f-488e-8c85-d15313df31c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1713.510169] env[63024]: DEBUG oslo_vmware.api [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Waiting for the task: (returnval){ [ 1713.510169] env[63024]: value = "task-1950791" [ 1713.510169] env[63024]: _type = "Task" [ 1713.510169] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1713.510736] env[63024]: DEBUG nova.network.neutron [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Updating instance_info_cache with network_info: [{"id": "2edee58c-e76b-46ad-b4c8-3b2a70467c01", "address": "fa:16:3e:1a:71:e7", "network": {"id": "c16b8a08-a304-46c8-bf65-ad5caee54acc", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-528457489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad3f57a3e2224175812d7816ea5327fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2edee58c-e7", "ovs_interfaceid": "2edee58c-e76b-46ad-b4c8-3b2a70467c01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1713.522045] env[63024]: DEBUG oslo_vmware.api [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950791, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.819278] env[63024]: DEBUG nova.compute.utils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1713.828388] env[63024]: DEBUG nova.network.neutron [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updated VIF entry in instance network info cache for port 241606ef-afe1-4ca8-912c-dae7639e4941. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1713.829180] env[63024]: DEBUG nova.network.neutron [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance_info_cache with network_info: [{"id": "241606ef-afe1-4ca8-912c-dae7639e4941", "address": "fa:16:3e:fc:f3:01", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241606ef-af", "ovs_interfaceid": "241606ef-afe1-4ca8-912c-dae7639e4941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1713.832247] env[63024]: DEBUG nova.compute.manager [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1713.832830] env[63024]: DEBUG nova.network.neutron [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1713.868475] env[63024]: DEBUG oslo_concurrency.lockutils [None req-45f4a3d9-7e19-42fd-8f06-b8994703ad36 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "610dd030-5080-498a-8744-b1411297d70d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.555s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1713.896909] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950790, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1713.940133] env[63024]: DEBUG nova.policy [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fc112b4851e4dbeac3a69409e7bf98e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1886be852b01400aaf7a31c8fe5d4d7a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1714.020016] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Releasing lock "refresh_cache-49eb6292-012a-4296-aff8-9c460866a602" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1714.020016] env[63024]: DEBUG nova.compute.manager [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Instance network_info: |[{"id": "2edee58c-e76b-46ad-b4c8-3b2a70467c01", "address": "fa:16:3e:1a:71:e7", "network": {"id": "c16b8a08-a304-46c8-bf65-ad5caee54acc", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-528457489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad3f57a3e2224175812d7816ea5327fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2edee58c-e7", "ovs_interfaceid": "2edee58c-e76b-46ad-b4c8-3b2a70467c01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1714.020408] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1a:71:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2edee58c-e76b-46ad-b4c8-3b2a70467c01', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1714.027092] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e 
tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Creating folder: Project (ad3f57a3e2224175812d7816ea5327fb). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1714.032021] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b07dc099-ae3e-49bf-a95b-4d9d757bc78b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.032816] env[63024]: DEBUG nova.compute.manager [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Received event network-vif-deleted-0b9a5894-831a-4645-8fee-497016b36839 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1714.034977] env[63024]: DEBUG nova.compute.manager [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Received event network-vif-plugged-e82533e3-2173-4dc5-911c-829fa32117ad {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1714.036738] env[63024]: DEBUG oslo_concurrency.lockutils [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] Acquiring lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.036738] env[63024]: DEBUG oslo_concurrency.lockutils [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] Lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.036738] env[63024]: DEBUG oslo_concurrency.lockutils [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] Lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1714.036738] env[63024]: DEBUG nova.compute.manager [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] No waiting events found dispatching network-vif-plugged-e82533e3-2173-4dc5-911c-829fa32117ad {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1714.036738] env[63024]: WARNING nova.compute.manager [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Received unexpected event network-vif-plugged-e82533e3-2173-4dc5-911c-829fa32117ad for instance with vm_state building and task_state spawning. 
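The records above show the compute manager taking and releasing a per-instance "<uuid>-events" lock around pop_instance_event before deciding that nobody is waiting on network-vif-plugged, which is why the unexpected-event WARNING follows. The sketch below is a minimal illustration of that locking pattern using oslo_concurrency (which the log shows in use); the lock name format, the event table, and the helper names are hypothetical and simplified, not Nova's implementation.

```python
# Minimal sketch (not Nova code): serialize a per-instance external-event
# table with the same "<uuid>-events" lock pattern seen in the log.
# All names here are hypothetical.
import threading

from oslo_concurrency import lockutils

_pending_events = {}  # instance_uuid -> {event_name: threading.Event}


def prepare_for_event(instance_uuid, event_name):
    """Register interest in an external event; return an object to wait on."""
    with lockutils.lock(f"{instance_uuid}-events"):
        ev = threading.Event()
        _pending_events.setdefault(instance_uuid, {})[event_name] = ev
        return ev


def pop_instance_event(instance_uuid, event_name):
    """Remove and return the waiter for event_name, or None if nobody waits."""
    with lockutils.lock(f"{instance_uuid}-events"):
        return _pending_events.get(instance_uuid, {}).pop(event_name, None)


def handle_external_event(instance_uuid, event_name):
    waiter = pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to the WARNING above: the event arrived before anything
        # registered interest in it (instance still building/spawning).
        print(f"unexpected event {event_name} for instance {instance_uuid}")
    else:
        waiter.set()  # wake the thread blocked on this event
```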
[ 1714.037314] env[63024]: DEBUG nova.compute.manager [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Received event network-changed-e82533e3-2173-4dc5-911c-829fa32117ad {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1714.037596] env[63024]: DEBUG nova.compute.manager [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Refreshing instance network info cache due to event network-changed-e82533e3-2173-4dc5-911c-829fa32117ad. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1714.037911] env[63024]: DEBUG oslo_concurrency.lockutils [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] Acquiring lock "refresh_cache-f6fddc23-ad36-4d6f-82a2-ded456b2596e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.038366] env[63024]: DEBUG oslo_concurrency.lockutils [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] Acquired lock "refresh_cache-f6fddc23-ad36-4d6f-82a2-ded456b2596e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1714.038658] env[63024]: DEBUG nova.network.neutron [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Refreshing network info cache for port e82533e3-2173-4dc5-911c-829fa32117ad {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1714.044149] env[63024]: DEBUG oslo_vmware.api [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950791, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.062820] env[63024]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1714.063234] env[63024]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63024) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1714.064202] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Folder already exists: Project (ad3f57a3e2224175812d7816ea5327fb). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1714.064810] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Creating folder: Instances. Parent ref: group-v402038. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1714.065708] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c993af08-d270-42f1-bba9-d1847a8a7a98 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.079156] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Created folder: Instances in parent group-v402038. [ 1714.079156] env[63024]: DEBUG oslo.service.loopingcall [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1714.079156] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1714.079156] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b19ca77b-2bfd-4346-81e3-790779c24967 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.103773] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1714.103773] env[63024]: value = "task-1950794" [ 1714.103773] env[63024]: _type = "Task" [ 1714.103773] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.112824] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950794, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.333300] env[63024]: DEBUG nova.compute.manager [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1714.343979] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f3448da-e6fc-47a0-89c3-73e82c821b4b req-6ec3c1cf-8108-4549-ad61-0c5ce855d5b6 service nova] Releasing lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1714.395363] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950790, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541477} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.395363] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 650a97b9-911e-44b0-9e82-a6d4cc95c9dd/650a97b9-911e-44b0-9e82-a6d4cc95c9dd.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1714.395559] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1714.396790] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68bd6a21-01bf-4089-a722-2671ef1beabe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.398972] env[63024]: DEBUG nova.network.neutron [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Successfully updated port: 7f7d9f32-5f3e-4ba0-afc0-270579e87766 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1714.403382] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1714.403382] env[63024]: value = "task-1950795" [ 1714.403382] env[63024]: _type = "Task" [ 1714.403382] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.418069] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950795, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.525531] env[63024]: DEBUG oslo_vmware.api [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Task: {'id': task-1950791, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.77565} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.525795] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1714.525977] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1714.526168] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1714.526344] env[63024]: INFO nova.compute.manager [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Took 1.91 seconds to destroy the instance on the hypervisor. [ 1714.526580] env[63024]: DEBUG oslo.service.loopingcall [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1714.526769] env[63024]: DEBUG nova.compute.manager [-] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1714.526862] env[63024]: DEBUG nova.network.neutron [-] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1714.586019] env[63024]: DEBUG nova.network.neutron [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1714.619690] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950794, 'name': CreateVM_Task, 'duration_secs': 0.460238} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.621752] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1714.625150] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'attachment_id': '646d405f-a452-416e-b3d6-df8ff624964b', 'boot_index': 0, 'delete_on_termination': True, 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402048', 'volume_id': '1e878e2f-2053-4c16-ad0c-263307073b4e', 'name': 'volume-1e878e2f-2053-4c16-ad0c-263307073b4e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '49eb6292-012a-4296-aff8-9c460866a602', 'attached_at': '', 'detached_at': '', 'volume_id': '1e878e2f-2053-4c16-ad0c-263307073b4e', 'serial': '1e878e2f-2053-4c16-ad0c-263307073b4e'}, 'device_type': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=63024) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1714.625150] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Root volume attach. Driver type: vmdk {{(pid=63024) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1714.627081] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b4af93-4257-4f1b-bc45-c9c769a51002 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.635610] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96de1461-16df-491b-aa84-7796631df2d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.648224] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5715f75b-d337-4055-baff-831ea0815fa4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.656521] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-972d2d14-0893-4cfa-bed7-2ee5e7f9cc85 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.669090] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Waiting for the task: (returnval){ [ 1714.669090] env[63024]: value = "task-1950796" [ 1714.669090] env[63024]: _type = "Task" [ 1714.669090] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1714.686787] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950796, 'name': RelocateVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1714.706833] env[63024]: DEBUG nova.network.neutron [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1714.828426] env[63024]: DEBUG nova.network.neutron [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Successfully created port: cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1714.852032] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b92cbc-5422-408a-a6bc-d08901b6dbd2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.860787] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-692f7a81-20ab-480a-b77d-5c1b4ca9fb56 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.904621] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "refresh_cache-f6fddc23-ad36-4d6f-82a2-ded456b2596e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1714.904621] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f17946-20bf-47a5-99a9-ba353376e146 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.921980] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da7a652-23bc-4395-9a7c-f5a82b22dd27 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.926310] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950795, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071526} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1714.927897] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1714.931593] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddc146bd-c31e-48b6-abcb-d39eb40c0c34 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.934811] env[63024]: DEBUG oslo_concurrency.lockutils [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Acquiring lock "601a003d-811c-4698-b0b6-054482d32c21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1714.935045] env[63024]: DEBUG oslo_concurrency.lockutils [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Lock "601a003d-811c-4698-b0b6-054482d32c21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1714.943650] env[63024]: DEBUG nova.compute.provider_tree [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1714.966440] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 650a97b9-911e-44b0-9e82-a6d4cc95c9dd/650a97b9-911e-44b0-9e82-a6d4cc95c9dd.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1714.967268] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e01f1e6-d323-40a6-8659-ff8080ccb6f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1714.990394] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1714.990394] env[63024]: value = "task-1950797" [ 1714.990394] env[63024]: _type = "Task" [ 1714.990394] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.001266] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950797, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.179783] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950796, 'name': RelocateVM_Task, 'duration_secs': 0.4209} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.180067] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Volume attach. Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1715.180292] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402048', 'volume_id': '1e878e2f-2053-4c16-ad0c-263307073b4e', 'name': 'volume-1e878e2f-2053-4c16-ad0c-263307073b4e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '49eb6292-012a-4296-aff8-9c460866a602', 'attached_at': '', 'detached_at': '', 'volume_id': '1e878e2f-2053-4c16-ad0c-263307073b4e', 'serial': '1e878e2f-2053-4c16-ad0c-263307073b4e'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1715.181054] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3ff3d7-41d6-4542-94b2-874adb4bb269 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.198594] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdaf510-69e7-4365-ad09-1d5307042468 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.213677] env[63024]: DEBUG oslo_concurrency.lockutils [req-941670fd-9f72-4d18-8a8e-0403c2894403 req-e4ea1c93-3271-4bab-a0c6-5437226798f0 service nova] Releasing lock "refresh_cache-f6fddc23-ad36-4d6f-82a2-ded456b2596e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1715.224119] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] volume-1e878e2f-2053-4c16-ad0c-263307073b4e/volume-1e878e2f-2053-4c16-ad0c-263307073b4e.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1715.224380] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquired lock "refresh_cache-f6fddc23-ad36-4d6f-82a2-ded456b2596e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1715.224538] env[63024]: DEBUG nova.network.neutron [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1715.225742] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4778da28-2d09-4f94-9af2-b5c230a3e163 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.250858] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Waiting for the task: (returnval){ [ 1715.250858] env[63024]: value = "task-1950798" [ 1715.250858] env[63024]: _type = "Task" [ 1715.250858] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.262139] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950798, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.356102] env[63024]: DEBUG nova.compute.manager [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1715.387789] env[63024]: DEBUG nova.virt.hardware [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1715.388414] env[63024]: DEBUG nova.virt.hardware [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1715.388414] env[63024]: DEBUG nova.virt.hardware [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1715.388414] env[63024]: DEBUG nova.virt.hardware [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1715.388505] env[63024]: DEBUG nova.virt.hardware [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1715.392019] env[63024]: DEBUG nova.virt.hardware [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1715.392019] env[63024]: DEBUG nova.virt.hardware [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1715.392019] env[63024]: DEBUG nova.virt.hardware [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1715.392019] env[63024]: DEBUG 
nova.virt.hardware [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1715.392019] env[63024]: DEBUG nova.virt.hardware [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1715.392720] env[63024]: DEBUG nova.virt.hardware [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1715.392720] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4bfbaee-9d9f-46e9-9ef7-b69f478eff4a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.401922] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ece28e0c-e035-4703-befc-ab675111b15b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.424153] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "2dd20650-9273-432a-be28-73ccb66c721d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1715.424436] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "2dd20650-9273-432a-be28-73ccb66c721d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.449237] env[63024]: DEBUG nova.scheduler.client.report [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1715.501197] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950797, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.767798] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950798, 'name': ReconfigVM_Task, 'duration_secs': 0.263547} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1715.768575] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Reconfigured VM instance instance-00000028 to attach disk [datastore1] volume-1e878e2f-2053-4c16-ad0c-263307073b4e/volume-1e878e2f-2053-4c16-ad0c-263307073b4e.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1715.774926] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-885a3509-bd7a-4380-be9e-ffd9cdc462ff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1715.785960] env[63024]: DEBUG nova.network.neutron [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1715.804744] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Waiting for the task: (returnval){ [ 1715.804744] env[63024]: value = "task-1950799" [ 1715.804744] env[63024]: _type = "Task" [ 1715.804744] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1715.818682] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950799, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1715.953869] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.644s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1715.954528] env[63024]: DEBUG nova.compute.manager [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1715.957042] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.963s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1715.958945] env[63024]: INFO nova.compute.claims [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1716.002311] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950797, 'name': ReconfigVM_Task, 'duration_secs': 0.834975} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.002311] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 650a97b9-911e-44b0-9e82-a6d4cc95c9dd/650a97b9-911e-44b0-9e82-a6d4cc95c9dd.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1716.002311] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bea76c09-c046-4ff6-b2a3-7d49e7006b4c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.013249] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1716.013249] env[63024]: value = "task-1950800" [ 1716.013249] env[63024]: _type = "Task" [ 1716.013249] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.023250] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950800, 'name': Rename_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.196168] env[63024]: DEBUG nova.compute.manager [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Received event network-vif-plugged-2edee58c-e76b-46ad-b4c8-3b2a70467c01 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1716.196616] env[63024]: DEBUG oslo_concurrency.lockutils [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] Acquiring lock "49eb6292-012a-4296-aff8-9c460866a602-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.196921] env[63024]: DEBUG oslo_concurrency.lockutils [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] Lock "49eb6292-012a-4296-aff8-9c460866a602-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.197047] env[63024]: DEBUG oslo_concurrency.lockutils [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] Lock "49eb6292-012a-4296-aff8-9c460866a602-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.197226] env[63024]: DEBUG nova.compute.manager [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] No waiting events found dispatching network-vif-plugged-2edee58c-e76b-46ad-b4c8-3b2a70467c01 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1716.197396] env[63024]: WARNING nova.compute.manager [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Received unexpected event network-vif-plugged-2edee58c-e76b-46ad-b4c8-3b2a70467c01 for instance with vm_state building and task_state spawning. [ 1716.197607] env[63024]: DEBUG nova.compute.manager [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Received event network-changed-2edee58c-e76b-46ad-b4c8-3b2a70467c01 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1716.197910] env[63024]: DEBUG nova.compute.manager [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Refreshing instance network info cache due to event network-changed-2edee58c-e76b-46ad-b4c8-3b2a70467c01. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1716.198289] env[63024]: DEBUG oslo_concurrency.lockutils [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] Acquiring lock "refresh_cache-49eb6292-012a-4296-aff8-9c460866a602" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.198289] env[63024]: DEBUG oslo_concurrency.lockutils [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] Acquired lock "refresh_cache-49eb6292-012a-4296-aff8-9c460866a602" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.198352] env[63024]: DEBUG nova.network.neutron [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Refreshing network info cache for port 2edee58c-e76b-46ad-b4c8-3b2a70467c01 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1716.247822] env[63024]: DEBUG nova.network.neutron [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Updating instance_info_cache with network_info: [{"id": "52fd1b1a-cd75-4bd3-967c-b98213510df1", "address": "fa:16:3e:0f:d1:6c", "network": {"id": "bf61c63a-4a12-4a43-ad5e-c26b1e04e50d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-248019057", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52fd1b1a-cd", "ovs_interfaceid": "52fd1b1a-cd75-4bd3-967c-b98213510df1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e82533e3-2173-4dc5-911c-829fa32117ad", "address": "fa:16:3e:42:f3:f5", "network": {"id": "a0abed7c-d870-45e2-be33-85de71d6cc56", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1905076612", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.235", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape82533e3-21", "ovs_interfaceid": 
"e82533e3-2173-4dc5-911c-829fa32117ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7f7d9f32-5f3e-4ba0-afc0-270579e87766", "address": "fa:16:3e:eb:30:a4", "network": {"id": "bf61c63a-4a12-4a43-ad5e-c26b1e04e50d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-248019057", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7d9f32-5f", "ovs_interfaceid": "7f7d9f32-5f3e-4ba0-afc0-270579e87766", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.319274] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950799, 'name': ReconfigVM_Task, 'duration_secs': 0.134813} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.319692] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402048', 'volume_id': '1e878e2f-2053-4c16-ad0c-263307073b4e', 'name': 'volume-1e878e2f-2053-4c16-ad0c-263307073b4e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '49eb6292-012a-4296-aff8-9c460866a602', 'attached_at': '', 'detached_at': '', 'volume_id': '1e878e2f-2053-4c16-ad0c-263307073b4e', 'serial': '1e878e2f-2053-4c16-ad0c-263307073b4e'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1716.320436] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce80635f-9c53-4470-8de2-102d73259d90 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.329268] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Waiting for the task: (returnval){ [ 1716.329268] env[63024]: value = "task-1950801" [ 1716.329268] env[63024]: _type = "Task" [ 1716.329268] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.340793] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950801, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.394450] env[63024]: DEBUG nova.network.neutron [-] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1716.466022] env[63024]: DEBUG nova.compute.utils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1716.467436] env[63024]: DEBUG nova.compute.manager [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1716.467635] env[63024]: DEBUG nova.network.neutron [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1716.510110] env[63024]: DEBUG nova.policy [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '893bfe0d8eef423aae6c7eb5cdc1a9e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18540818b60e4483963d14559bc5c38d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1716.525115] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950800, 'name': Rename_Task, 'duration_secs': 0.347455} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.525183] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1716.525388] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ffa92da7-0715-4729-a012-4f62409ff518 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.533455] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1716.533455] env[63024]: value = "task-1950802" [ 1716.533455] env[63024]: _type = "Task" [ 1716.533455] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.542737] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950802, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.553111] env[63024]: DEBUG nova.compute.manager [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Received event network-vif-plugged-7f7d9f32-5f3e-4ba0-afc0-270579e87766 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1716.553304] env[63024]: DEBUG oslo_concurrency.lockutils [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] Acquiring lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1716.553586] env[63024]: DEBUG oslo_concurrency.lockutils [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] Lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1716.554807] env[63024]: DEBUG oslo_concurrency.lockutils [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] Lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1716.554807] env[63024]: DEBUG nova.compute.manager [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] No waiting events found dispatching network-vif-plugged-7f7d9f32-5f3e-4ba0-afc0-270579e87766 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1716.554807] env[63024]: WARNING nova.compute.manager 
[req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Received unexpected event network-vif-plugged-7f7d9f32-5f3e-4ba0-afc0-270579e87766 for instance with vm_state building and task_state spawning. [ 1716.554807] env[63024]: DEBUG nova.compute.manager [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Received event network-changed-7f7d9f32-5f3e-4ba0-afc0-270579e87766 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1716.554807] env[63024]: DEBUG nova.compute.manager [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Refreshing instance network info cache due to event network-changed-7f7d9f32-5f3e-4ba0-afc0-270579e87766. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1716.554807] env[63024]: DEBUG oslo_concurrency.lockutils [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] Acquiring lock "refresh_cache-f6fddc23-ad36-4d6f-82a2-ded456b2596e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1716.756020] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Releasing lock "refresh_cache-f6fddc23-ad36-4d6f-82a2-ded456b2596e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1716.756020] env[63024]: DEBUG nova.compute.manager [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Instance network_info: |[{"id": "52fd1b1a-cd75-4bd3-967c-b98213510df1", "address": "fa:16:3e:0f:d1:6c", "network": {"id": "bf61c63a-4a12-4a43-ad5e-c26b1e04e50d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-248019057", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52fd1b1a-cd", "ovs_interfaceid": "52fd1b1a-cd75-4bd3-967c-b98213510df1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e82533e3-2173-4dc5-911c-829fa32117ad", "address": "fa:16:3e:42:f3:f5", "network": {"id": "a0abed7c-d870-45e2-be33-85de71d6cc56", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1905076612", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.129.235", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape82533e3-21", "ovs_interfaceid": "e82533e3-2173-4dc5-911c-829fa32117ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7f7d9f32-5f3e-4ba0-afc0-270579e87766", "address": "fa:16:3e:eb:30:a4", "network": {"id": "bf61c63a-4a12-4a43-ad5e-c26b1e04e50d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-248019057", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7d9f32-5f", "ovs_interfaceid": "7f7d9f32-5f3e-4ba0-afc0-270579e87766", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1716.756020] env[63024]: DEBUG oslo_concurrency.lockutils [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] Acquired lock "refresh_cache-f6fddc23-ad36-4d6f-82a2-ded456b2596e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1716.756020] env[63024]: DEBUG nova.network.neutron [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Refreshing network info cache for port 7f7d9f32-5f3e-4ba0-afc0-270579e87766 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1716.756313] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0f:d1:6c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '52fd1b1a-cd75-4bd3-967c-b98213510df1', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:f3:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c1520c99-af74-4d61-a8ae-56aef56ef4f0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'e82533e3-2173-4dc5-911c-829fa32117ad', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:30:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '790c811b-3fa6-49f8-87ac-c51450911137', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7f7d9f32-5f3e-4ba0-afc0-270579e87766', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1716.767974] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Creating folder: Project (674f344eaf784662ac922405620a3ac4). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1716.773325] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-732db493-c204-416d-bf86-279f6cd65808 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.810846] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Created folder: Project (674f344eaf784662ac922405620a3ac4) in parent group-v401959. [ 1716.810846] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Creating folder: Instances. Parent ref: group-v402087. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1716.810846] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-516b5358-b7e9-426d-83db-b23f8a986f3c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.823015] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Created folder: Instances in parent group-v402087. [ 1716.823015] env[63024]: DEBUG oslo.service.loopingcall [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1716.823015] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1716.823015] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-15780892-37ad-47d0-8f19-be4c19be4fb6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.854796] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950801, 'name': Rename_Task, 'duration_secs': 0.136627} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1716.856262] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1716.856682] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1716.856682] env[63024]: value = "task-1950805" [ 1716.856682] env[63024]: _type = "Task" [ 1716.856682] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.857070] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5db9cef7-c27b-473a-9795-793705a2fa1f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1716.871234] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950805, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.872458] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Waiting for the task: (returnval){ [ 1716.872458] env[63024]: value = "task-1950806" [ 1716.872458] env[63024]: _type = "Task" [ 1716.872458] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1716.887278] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950806, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1716.898931] env[63024]: INFO nova.compute.manager [-] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Took 2.37 seconds to deallocate network for instance. [ 1716.972928] env[63024]: DEBUG nova.compute.manager [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1716.984771] env[63024]: DEBUG nova.network.neutron [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Successfully created port: d9f698cf-c7f2-403c-92db-98c7ef61b086 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1717.055993] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950802, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.373628] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950805, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.392061] env[63024]: DEBUG oslo_vmware.api [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950806, 'name': PowerOnVM_Task, 'duration_secs': 0.463186} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.393029] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1717.393249] env[63024]: INFO nova.compute.manager [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Took 3.95 seconds to spawn the instance on the hypervisor. [ 1717.393563] env[63024]: DEBUG nova.compute.manager [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1717.394630] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b40af5d-28af-4487-8874-d3e7bccd3f3b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.409701] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1717.467076] env[63024]: DEBUG nova.network.neutron [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Updated VIF entry in instance network info cache for port 2edee58c-e76b-46ad-b4c8-3b2a70467c01. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1717.467117] env[63024]: DEBUG nova.network.neutron [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Updating instance_info_cache with network_info: [{"id": "2edee58c-e76b-46ad-b4c8-3b2a70467c01", "address": "fa:16:3e:1a:71:e7", "network": {"id": "c16b8a08-a304-46c8-bf65-ad5caee54acc", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-528457489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad3f57a3e2224175812d7816ea5327fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2edee58c-e7", "ovs_interfaceid": "2edee58c-e76b-46ad-b4c8-3b2a70467c01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.496386] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6cbf653-aa91-4622-8323-16bbefca2db9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.505069] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a488f8-1042-4ec8-aafa-7daca094af78 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.541730] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fb6894-da9b-48b4-acc1-725bfb0ed1f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.554256] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb00fa7-b264-4e7d-90e7-d81ca53f7048 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.558279] env[63024]: DEBUG oslo_vmware.api [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950802, 'name': PowerOnVM_Task, 'duration_secs': 0.848362} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.559192] env[63024]: DEBUG nova.network.neutron [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Updated VIF entry in instance network info cache for port 7f7d9f32-5f3e-4ba0-afc0-270579e87766. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1717.559704] env[63024]: DEBUG nova.network.neutron [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Updating instance_info_cache with network_info: [{"id": "52fd1b1a-cd75-4bd3-967c-b98213510df1", "address": "fa:16:3e:0f:d1:6c", "network": {"id": "bf61c63a-4a12-4a43-ad5e-c26b1e04e50d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-248019057", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.143", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", "segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap52fd1b1a-cd", "ovs_interfaceid": "52fd1b1a-cd75-4bd3-967c-b98213510df1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e82533e3-2173-4dc5-911c-829fa32117ad", "address": "fa:16:3e:42:f3:f5", "network": {"id": "a0abed7c-d870-45e2-be33-85de71d6cc56", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1905076612", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.235", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c1520c99-af74-4d61-a8ae-56aef56ef4f0", "external-id": "nsx-vlan-transportzone-891", "segmentation_id": 891, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape82533e3-21", "ovs_interfaceid": "e82533e3-2173-4dc5-911c-829fa32117ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "7f7d9f32-5f3e-4ba0-afc0-270579e87766", "address": "fa:16:3e:eb:30:a4", "network": {"id": "bf61c63a-4a12-4a43-ad5e-c26b1e04e50d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-248019057", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.109", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "790c811b-3fa6-49f8-87ac-c51450911137", "external-id": "nsx-vlan-transportzone-908", 
"segmentation_id": 908, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7f7d9f32-5f", "ovs_interfaceid": "7f7d9f32-5f3e-4ba0-afc0-270579e87766", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1717.561327] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1717.561627] env[63024]: INFO nova.compute.manager [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Took 8.65 seconds to spawn the instance on the hypervisor. [ 1717.561856] env[63024]: DEBUG nova.compute.manager [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1717.563321] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509841a4-6607-4b8b-a117-52d5e8d60af8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.574365] env[63024]: DEBUG nova.compute.provider_tree [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1717.740052] env[63024]: DEBUG nova.network.neutron [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Successfully updated port: cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1717.871374] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950805, 'name': CreateVM_Task, 'duration_secs': 0.555501} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1717.871563] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1717.872491] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.872658] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.872972] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1717.873304] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-631655f0-38bb-4c8f-8388-762b4b230450 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1717.878373] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1717.878373] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c3ee18-2fb5-5110-14ad-55d506d9fc2f" [ 1717.878373] env[63024]: _type = "Task" [ 1717.878373] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1717.886918] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c3ee18-2fb5-5110-14ad-55d506d9fc2f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1717.922145] env[63024]: INFO nova.compute.manager [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Took 47.69 seconds to build instance. 
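The records up to this point repeat one pattern: a *_Task method (CreateVM_Task, PowerOnVM_Task, Rename_Task, SearchDatastore_Task) is invoked through the oslo.vmware session, the returned task reference is handed to wait_for_task, and the "Task: {...} progress is N%" lines are the poll loop until "completed successfully". A minimal sketch of that pattern, assuming the public oslo.vmware session API (VMwareAPISession, invoke_api, wait_for_task) and placeholder vCenter host, credentials and VM reference; this is illustrative, not the exact code path Nova runs:

    # Sketch of the invoke-then-poll pattern visible in the records above.
    # Host, credentials, retry count and poll interval are placeholders.
    from oslo_vmware import api

    def make_session():
        # Logs in on construction, which is what produces the
        # "Successfully established new session" record at service start.
        return api.VMwareAPISession(
            'vcenter.example.org', 'administrator@vsphere.local', 'secret',
            api_retry_count=10, task_poll_interval=0.5)

    def power_on(session, vm_ref):
        # PowerOnVM_Task returns a Task managed-object reference immediately;
        # wait_for_task() polls it (the "progress is N%" lines) and raises if
        # the task finishes in an error state.
        task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        return session.wait_for_task(task_ref)

In the log the same wrapper is what turns a single PowerOnVM_Task call into the sequence of _poll_task records with increasing progress percentages.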
[ 1717.970022] env[63024]: DEBUG oslo_concurrency.lockutils [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] Releasing lock "refresh_cache-49eb6292-012a-4296-aff8-9c460866a602" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1717.970501] env[63024]: DEBUG nova.compute.manager [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Received event network-changed-67b8a4fd-320c-4178-ab08-8bbe9fb878ba {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1717.970605] env[63024]: DEBUG nova.compute.manager [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Refreshing instance network info cache due to event network-changed-67b8a4fd-320c-4178-ab08-8bbe9fb878ba. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1717.970745] env[63024]: DEBUG oslo_concurrency.lockutils [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] Acquiring lock "refresh_cache-9679a1a2-b003-4a60-a812-8b3a9b5f545f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1717.970946] env[63024]: DEBUG oslo_concurrency.lockutils [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] Acquired lock "refresh_cache-9679a1a2-b003-4a60-a812-8b3a9b5f545f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1717.971044] env[63024]: DEBUG nova.network.neutron [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Refreshing network info cache for port 67b8a4fd-320c-4178-ab08-8bbe9fb878ba {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1717.981923] env[63024]: DEBUG nova.compute.manager [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1718.007055] env[63024]: DEBUG nova.virt.hardware [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1718.007312] env[63024]: DEBUG nova.virt.hardware [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1718.007467] env[63024]: DEBUG nova.virt.hardware [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1718.007651] env[63024]: DEBUG nova.virt.hardware [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1718.007791] env[63024]: DEBUG nova.virt.hardware [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1718.007935] env[63024]: DEBUG nova.virt.hardware [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1718.008444] env[63024]: DEBUG nova.virt.hardware [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1718.008444] env[63024]: DEBUG nova.virt.hardware [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1718.008542] env[63024]: DEBUG nova.virt.hardware [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 
tempest-ServersTestJSON-2007614233-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1718.008659] env[63024]: DEBUG nova.virt.hardware [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1718.008826] env[63024]: DEBUG nova.virt.hardware [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1718.009675] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f810bf-a442-418c-8425-648cf51c791e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.017929] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c778fd7-912a-473b-8c2f-e68408c125c8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.063701] env[63024]: DEBUG oslo_concurrency.lockutils [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] Releasing lock "refresh_cache-f6fddc23-ad36-4d6f-82a2-ded456b2596e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.063987] env[63024]: DEBUG nova.compute.manager [req-bdb9e0b0-14be-420d-9242-fae996f43338 req-f038744a-8218-46df-8d37-6e0a89f2d65f service nova] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Received event network-vif-deleted-36bc0224-4621-4b20-b039-244da00236ac {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1718.078236] env[63024]: DEBUG nova.scheduler.client.report [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1718.097597] env[63024]: INFO nova.compute.manager [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Took 49.63 seconds to build instance. 
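The nova.virt.hardware records above walk through CPU topology selection for the m1.nano flavor: with 1 vCPU and effectively unbounded limits (65536 sockets/cores/threads), only one split exists, so the driver reports "Got 1 possible topologies" and chooses cores=1, sockets=1, threads=1. The sketch below reproduces that enumeration in simplified form; it is an illustration of the idea, not a copy of Nova's _get_possible_cpu_topologies:

    # Enumerate every (sockets, cores, threads) split whose product equals the
    # flavor's vCPU count and that stays within the given limits.
    from collections import namedtuple

    Topology = namedtuple('Topology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(Topology(sockets, cores, threads))
        return found

    # For vcpus=1 this yields exactly [Topology(sockets=1, cores=1, threads=1)],
    # matching "Got 1 possible topologies" in the log.
    print(possible_topologies(1))

For larger flavors the same enumeration produces several candidates, which Nova then sorts against the flavor and image preferences shown as "Flavor pref" and "Image pref" above.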
[ 1718.243433] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "refresh_cache-8edc24d6-9073-4836-b14b-422df3ac1b88" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.244129] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "refresh_cache-8edc24d6-9073-4836-b14b-422df3ac1b88" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.244129] env[63024]: DEBUG nova.network.neutron [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1718.390459] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c3ee18-2fb5-5110-14ad-55d506d9fc2f, 'name': SearchDatastore_Task, 'duration_secs': 0.022498} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.390781] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1718.391128] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1718.391453] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.391611] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1718.391792] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1718.392118] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-697557fe-3b3d-4a3d-9484-e5f0a94fba8f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.406496] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1718.406496] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1718.406496] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f36b565e-2bea-43b9-8572-c2a46dd5787e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.413086] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1718.413086] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dcc996-9730-562f-b467-b67ae5f5cf24" [ 1718.413086] env[63024]: _type = "Task" [ 1718.413086] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.422567] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dcc996-9730-562f-b467-b67ae5f5cf24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1718.424095] env[63024]: DEBUG oslo_concurrency.lockutils [None req-00f95c73-7b04-4f75-834a-8b7c927eeb4e tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Lock "49eb6292-012a-4296-aff8-9c460866a602" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.196s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.543765] env[63024]: DEBUG nova.network.neutron [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Successfully updated port: d9f698cf-c7f2-403c-92db-98c7ef61b086 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1718.587088] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.630s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.587652] env[63024]: DEBUG nova.compute.manager [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1718.592904] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.569s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.592904] env[63024]: INFO nova.compute.claims [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1718.598775] env[63024]: DEBUG nova.compute.manager [req-f83be66d-f0e2-45ca-8ec9-f1b706bb218f req-e3f14f26-9fed-4fe5-ae3b-ba16710cf5af service nova] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Received event network-vif-plugged-d9f698cf-c7f2-403c-92db-98c7ef61b086 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1718.598775] env[63024]: DEBUG oslo_concurrency.lockutils [req-f83be66d-f0e2-45ca-8ec9-f1b706bb218f req-e3f14f26-9fed-4fe5-ae3b-ba16710cf5af service nova] Acquiring lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.598920] env[63024]: DEBUG oslo_concurrency.lockutils [req-f83be66d-f0e2-45ca-8ec9-f1b706bb218f req-e3f14f26-9fed-4fe5-ae3b-ba16710cf5af service nova] Lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.599067] env[63024]: DEBUG oslo_concurrency.lockutils [req-f83be66d-f0e2-45ca-8ec9-f1b706bb218f req-e3f14f26-9fed-4fe5-ae3b-ba16710cf5af service nova] Lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.599238] env[63024]: DEBUG nova.compute.manager [req-f83be66d-f0e2-45ca-8ec9-f1b706bb218f req-e3f14f26-9fed-4fe5-ae3b-ba16710cf5af service nova] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] No waiting events found dispatching network-vif-plugged-d9f698cf-c7f2-403c-92db-98c7ef61b086 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1718.599394] env[63024]: WARNING nova.compute.manager [req-f83be66d-f0e2-45ca-8ec9-f1b706bb218f req-e3f14f26-9fed-4fe5-ae3b-ba16710cf5af service nova] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Received unexpected event network-vif-plugged-d9f698cf-c7f2-403c-92db-98c7ef61b086 for instance with vm_state building and task_state spawning. [ 1718.599815] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5e2a851f-0d09-45e0-af31-b9a2e6b8ca41 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.150s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.794100] env[63024]: DEBUG nova.network.neutron [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1718.806026] env[63024]: DEBUG nova.compute.manager [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Received event network-vif-plugged-cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1718.806272] env[63024]: DEBUG oslo_concurrency.lockutils [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] Acquiring lock "8edc24d6-9073-4836-b14b-422df3ac1b88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1718.806482] env[63024]: DEBUG oslo_concurrency.lockutils [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] Lock "8edc24d6-9073-4836-b14b-422df3ac1b88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1718.806643] env[63024]: DEBUG oslo_concurrency.lockutils [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] Lock "8edc24d6-9073-4836-b14b-422df3ac1b88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1718.806802] env[63024]: DEBUG nova.compute.manager [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] No waiting events found dispatching network-vif-plugged-cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1718.806957] env[63024]: WARNING nova.compute.manager [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Received unexpected event network-vif-plugged-cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e for instance with vm_state building and task_state spawning. [ 1718.807630] env[63024]: DEBUG nova.compute.manager [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Received event network-changed-cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1718.807819] env[63024]: DEBUG nova.compute.manager [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Refreshing instance network info cache due to event network-changed-cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1718.807993] env[63024]: DEBUG oslo_concurrency.lockutils [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] Acquiring lock "refresh_cache-8edc24d6-9073-4836-b14b-422df3ac1b88" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1718.924129] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dcc996-9730-562f-b467-b67ae5f5cf24, 'name': SearchDatastore_Task, 'duration_secs': 0.015669} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1718.924935] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc12e37e-fdfc-48e9-b82d-2f284905371a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1718.927286] env[63024]: DEBUG nova.compute.manager [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1718.937199] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1718.937199] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5226519f-669f-149e-8f6a-b37c3536aece" [ 1718.937199] env[63024]: _type = "Task" [ 1718.937199] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1718.942914] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5226519f-669f-149e-8f6a-b37c3536aece, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.035727] env[63024]: DEBUG nova.network.neutron [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Updating instance_info_cache with network_info: [{"id": "cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e", "address": "fa:16:3e:2c:08:94", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdff8c9b-4a", "ovs_interfaceid": "cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.047937] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "refresh_cache-df2933d1-32c3-48a6-8ceb-d5e3047d0b78" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1719.048119] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "refresh_cache-df2933d1-32c3-48a6-8ceb-d5e3047d0b78" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.048238] env[63024]: DEBUG nova.network.neutron [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1719.098572] env[63024]: DEBUG nova.compute.utils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1719.105299] env[63024]: DEBUG nova.compute.manager [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1719.105299] env[63024]: DEBUG nova.network.neutron [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1719.106842] env[63024]: DEBUG nova.compute.manager [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1719.177827] env[63024]: DEBUG nova.network.neutron [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Updated VIF entry in instance network info cache for port 67b8a4fd-320c-4178-ab08-8bbe9fb878ba. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1719.178755] env[63024]: DEBUG nova.network.neutron [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Updating instance_info_cache with network_info: [{"id": "67b8a4fd-320c-4178-ab08-8bbe9fb878ba", "address": "fa:16:3e:bb:35:36", "network": {"id": "b4353b4e-cb5e-42f6-979d-a0622ed453a3", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-1437328880-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.213", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7ed27976af940448e1017ee9c572fa9", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67b8a4fd-32", "ovs_interfaceid": "67b8a4fd-320c-4178-ab08-8bbe9fb878ba", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1719.182879] env[63024]: DEBUG nova.policy [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '08ddbcf65df7401dafe0e698f5374ff0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '30be8d2fefa341efb89aad6f645d79f1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1719.449151] env[63024]: DEBUG 
oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5226519f-669f-149e-8f6a-b37c3536aece, 'name': SearchDatastore_Task, 'duration_secs': 0.010851} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.449447] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.449703] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] f6fddc23-ad36-4d6f-82a2-ded456b2596e/f6fddc23-ad36-4d6f-82a2-ded456b2596e.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1719.449963] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2bbc60bc-eb50-44ef-a28f-85d12ef1a226 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.458164] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.460479] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1719.460479] env[63024]: value = "task-1950807" [ 1719.460479] env[63024]: _type = "Task" [ 1719.460479] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.469924] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950807, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.537719] env[63024]: DEBUG nova.network.neutron [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Successfully created port: bc1da74a-2cfe-4e8e-9682-78541f185723 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1719.541173] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "refresh_cache-8edc24d6-9073-4836-b14b-422df3ac1b88" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.541332] env[63024]: DEBUG nova.compute.manager [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Instance network_info: |[{"id": "cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e", "address": "fa:16:3e:2c:08:94", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdff8c9b-4a", "ovs_interfaceid": "cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1719.541608] env[63024]: DEBUG oslo_concurrency.lockutils [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] Acquired lock "refresh_cache-8edc24d6-9073-4836-b14b-422df3ac1b88" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1719.541815] env[63024]: DEBUG nova.network.neutron [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Refreshing network info cache for port cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1719.543074] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:08:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1719.550778] env[63024]: DEBUG oslo.service.loopingcall [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1719.555619] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1719.558583] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d4969677-37f0-4227-960d-07850372d78d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.579500] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1719.579500] env[63024]: value = "task-1950808" [ 1719.579500] env[63024]: _type = "Task" [ 1719.579500] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1719.588746] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950808, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1719.603904] env[63024]: DEBUG nova.compute.manager [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1719.633362] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1719.655875] env[63024]: DEBUG nova.network.neutron [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1719.682772] env[63024]: DEBUG oslo_concurrency.lockutils [req-e5998f4c-f81e-47a2-a5ea-cbfe9c6ef095 req-d499fabe-6b8d-4256-9df6-74a2b0f3fc99 service nova] Releasing lock "refresh_cache-9679a1a2-b003-4a60-a812-8b3a9b5f545f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1719.981886] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950807, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513079} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1719.982313] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] f6fddc23-ad36-4d6f-82a2-ded456b2596e/f6fddc23-ad36-4d6f-82a2-ded456b2596e.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1719.982460] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1719.982980] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-97854d78-c632-4d27-b7a7-24436e7589ef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1719.993664] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1719.993664] env[63024]: value = "task-1950809" [ 1719.993664] env[63024]: _type = "Task" [ 1719.993664] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.002861] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950809, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.094573] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950808, 'name': CreateVM_Task, 'duration_secs': 0.505529} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.094747] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1720.095406] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1720.095567] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1720.095870] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1720.096449] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad1018c1-7601-4ac8-bc00-a708e0a7eaba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.100593] env[63024]: DEBUG nova.network.neutron [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Updating instance_info_cache with network_info: [{"id": "d9f698cf-c7f2-403c-92db-98c7ef61b086", "address": "fa:16:3e:77:77:c8", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9f698cf-c7", "ovs_interfaceid": "d9f698cf-c7f2-403c-92db-98c7ef61b086", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.103017] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 
tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1720.103017] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52edca75-1d47-7961-c790-3d95e01ddf42" [ 1720.103017] env[63024]: _type = "Task" [ 1720.103017] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.120107] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52edca75-1d47-7961-c790-3d95e01ddf42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.191291] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0d51645-d1b3-46e1-a72d-323c19e8875e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.200457] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1b74961-017c-47eb-baad-445400f2fe36 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.209089] env[63024]: DEBUG nova.network.neutron [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Updated VIF entry in instance network info cache for port cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1720.209089] env[63024]: DEBUG nova.network.neutron [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Updating instance_info_cache with network_info: [{"id": "cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e", "address": "fa:16:3e:2c:08:94", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdff8c9b-4a", "ovs_interfaceid": "cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1720.239762] env[63024]: DEBUG oslo_concurrency.lockutils [req-e8655de2-64bb-4244-a348-1bfe25124544 req-cf942e11-9cc6-4ef4-acb7-f695af131a07 service nova] Releasing lock "refresh_cache-8edc24d6-9073-4836-b14b-422df3ac1b88" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.240805] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8238e2a1-c736-410d-b0ec-bc7643f28ce6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.250882] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de7129a-d7e4-4d83-9546-c24d6d66e7b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.268056] env[63024]: DEBUG nova.compute.provider_tree [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1720.504851] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076881} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.505194] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1720.506074] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43aa15e3-0866-4591-a3ad-43e5f41a7f90 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.535618] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] f6fddc23-ad36-4d6f-82a2-ded456b2596e/f6fddc23-ad36-4d6f-82a2-ded456b2596e.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1720.535961] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ad67403-dfc2-45cb-b148-a9e174079a42 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.560363] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1720.560363] env[63024]: value = "task-1950810" [ 1720.560363] env[63024]: _type = "Task" [ 1720.560363] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.569180] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950810, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.604147] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "refresh_cache-df2933d1-32c3-48a6-8ceb-d5e3047d0b78" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.604498] env[63024]: DEBUG nova.compute.manager [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Instance network_info: |[{"id": "d9f698cf-c7f2-403c-92db-98c7ef61b086", "address": "fa:16:3e:77:77:c8", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9f698cf-c7", "ovs_interfaceid": "d9f698cf-c7f2-403c-92db-98c7ef61b086", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1720.605134] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:77:77:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec763be6-4041-4651-8fd7-3820cf0ab86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9f698cf-c7f2-403c-92db-98c7ef61b086', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1720.613623] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Creating folder: Project (18540818b60e4483963d14559bc5c38d). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1720.617091] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cf7077c8-7cef-4100-a263-e4897cc4a9d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.622463] env[63024]: DEBUG nova.compute.manager [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1720.629516] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52edca75-1d47-7961-c790-3d95e01ddf42, 'name': SearchDatastore_Task, 'duration_secs': 0.018673} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.629815] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1720.630056] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1720.630325] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1720.630466] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1720.630648] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1720.630902] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7719fe9-9734-473b-8660-1ded4a41fc7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.641236] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Created folder: Project (18540818b60e4483963d14559bc5c38d) in parent group-v401959. [ 1720.641484] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Creating folder: Instances. Parent ref: group-v402091. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1720.644187] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-306181be-7216-4e74-b5b3-3fb2a5f893e0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.648356] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1720.649937] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1720.650461] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-adcacac2-3e5a-4db2-b8a1-a601b0fa3958 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.655636] env[63024]: DEBUG nova.virt.hardware [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1720.655879] env[63024]: DEBUG nova.virt.hardware [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1720.656021] env[63024]: DEBUG nova.virt.hardware [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1720.656204] env[63024]: DEBUG nova.virt.hardware [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1720.656372] env[63024]: DEBUG nova.virt.hardware [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 1720.656516] env[63024]: DEBUG nova.virt.hardware [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1720.656733] env[63024]: DEBUG nova.virt.hardware [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1720.656870] env[63024]: DEBUG nova.virt.hardware [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1720.657058] env[63024]: DEBUG nova.virt.hardware [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1720.657226] env[63024]: DEBUG nova.virt.hardware [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1720.657399] env[63024]: DEBUG nova.virt.hardware [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1720.658452] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d297104-fe87-4d3d-a27b-497a7eefe8ae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.663441] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1720.663441] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cc487c-4476-febc-1b37-4cb4dd80be4e" [ 1720.663441] env[63024]: _type = "Task" [ 1720.663441] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.663720] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Created folder: Instances in parent group-v402091. [ 1720.663878] env[63024]: DEBUG oslo.service.loopingcall [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1720.664565] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1720.668307] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-269c781d-1f7a-476d-8b59-312c99e84206 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.687718] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e23b4eb-d333-4b2d-9e5e-946a8df37509 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.696382] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cc487c-4476-febc-1b37-4cb4dd80be4e, 'name': SearchDatastore_Task, 'duration_secs': 0.013413} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1720.696551] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1720.696551] env[63024]: value = "task-1950813" [ 1720.696551] env[63024]: _type = "Task" [ 1720.696551] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.697869] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f448e05a-7a69-45c3-ab01-418bda0c041b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1720.717446] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950813, 'name': CreateVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.718933] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1720.718933] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520ad2c4-57f8-0e9b-e2bf-1d1449e3017c" [ 1720.718933] env[63024]: _type = "Task" [ 1720.718933] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1720.727941] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520ad2c4-57f8-0e9b-e2bf-1d1449e3017c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1720.771262] env[63024]: DEBUG nova.scheduler.client.report [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1721.023944] env[63024]: DEBUG nova.compute.manager [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Received event network-changed-d9f698cf-c7f2-403c-92db-98c7ef61b086 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1721.024134] env[63024]: DEBUG nova.compute.manager [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Refreshing instance network info cache due to event network-changed-d9f698cf-c7f2-403c-92db-98c7ef61b086. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1721.024403] env[63024]: DEBUG oslo_concurrency.lockutils [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] Acquiring lock "refresh_cache-df2933d1-32c3-48a6-8ceb-d5e3047d0b78" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.024585] env[63024]: DEBUG oslo_concurrency.lockutils [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] Acquired lock "refresh_cache-df2933d1-32c3-48a6-8ceb-d5e3047d0b78" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.024759] env[63024]: DEBUG nova.network.neutron [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Refreshing network info cache for port d9f698cf-c7f2-403c-92db-98c7ef61b086 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1721.073448] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950810, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.165911] env[63024]: DEBUG nova.network.neutron [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Successfully updated port: bc1da74a-2cfe-4e8e-9682-78541f185723 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1721.210656] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950813, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.229772] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520ad2c4-57f8-0e9b-e2bf-1d1449e3017c, 'name': SearchDatastore_Task, 'duration_secs': 0.022223} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.230388] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1721.233085] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 8edc24d6-9073-4836-b14b-422df3ac1b88/8edc24d6-9073-4836-b14b-422df3ac1b88.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1721.233085] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-44ea26ba-9b01-47c3-8e94-85f6e4fba654 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.239292] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1721.239292] env[63024]: value = "task-1950814" [ 1721.239292] env[63024]: _type = "Task" [ 1721.239292] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.247179] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950814, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.279911] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.687s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.279911] env[63024]: DEBUG nova.compute.manager [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1721.282860] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.573s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.286240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.002s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.289208] env[63024]: DEBUG oslo_concurrency.lockutils [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.338s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1721.289544] env[63024]: DEBUG nova.objects.instance [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lazy-loading 'resources' on Instance uuid 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1721.321637] env[63024]: INFO nova.scheduler.client.report [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleted allocations for instance 1ad97ed0-2a84-4783-8511-e0f6b24861bd [ 1721.575146] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950810, 'name': ReconfigVM_Task, 'duration_secs': 0.917174} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.575146] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Reconfigured VM instance instance-00000026 to attach disk [datastore1] f6fddc23-ad36-4d6f-82a2-ded456b2596e/f6fddc23-ad36-4d6f-82a2-ded456b2596e.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1721.576062] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67ed6db7-6194-42a5-8484-f912e1833a47 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.586029] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1721.586029] env[63024]: value = "task-1950815" [ 1721.586029] env[63024]: _type = "Task" [ 1721.586029] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.593639] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950815, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.667785] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Acquiring lock "refresh_cache-3815d381-760d-40fc-98cf-8e6af287007f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.667928] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Acquired lock "refresh_cache-3815d381-760d-40fc-98cf-8e6af287007f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.668921] env[63024]: DEBUG nova.network.neutron [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1721.714946] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950813, 'name': CreateVM_Task, 'duration_secs': 0.856013} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1721.715217] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1721.716488] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1721.716758] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1721.717714] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1721.717994] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0c1729f-a4ef-49d8-b545-924fc35ec24b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1721.725715] env[63024]: DEBUG oslo_vmware.api 
[None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1721.725715] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526759d6-64dd-ae6f-c089-82663d51cbf2" [ 1721.725715] env[63024]: _type = "Task" [ 1721.725715] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1721.736597] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526759d6-64dd-ae6f-c089-82663d51cbf2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.749408] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950814, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1721.793718] env[63024]: DEBUG nova.network.neutron [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Updated VIF entry in instance network info cache for port d9f698cf-c7f2-403c-92db-98c7ef61b086. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1721.793718] env[63024]: DEBUG nova.network.neutron [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Updating instance_info_cache with network_info: [{"id": "d9f698cf-c7f2-403c-92db-98c7ef61b086", "address": "fa:16:3e:77:77:c8", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9f698cf-c7", "ovs_interfaceid": "d9f698cf-c7f2-403c-92db-98c7ef61b086", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1721.794972] env[63024]: DEBUG nova.compute.utils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1721.800534] env[63024]: DEBUG nova.compute.manager [None 
req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1721.801409] env[63024]: DEBUG nova.network.neutron [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1721.832705] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1c1e9c37-46be-4328-b652-77e4fa42b215 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "1ad97ed0-2a84-4783-8511-e0f6b24861bd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.414s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1721.852903] env[63024]: DEBUG nova.policy [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54b7a5c8406e44e3a00cf903bc74e48d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99c4328f2c8c4139b4eace4b465e37e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1722.096817] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950815, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.166906] env[63024]: DEBUG nova.network.neutron [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Successfully created port: f4505342-e35b-4162-a5ba-ed4d32e9ea65 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1722.183847] env[63024]: DEBUG nova.compute.manager [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Stashing vm_state: active {{(pid=63024) _prep_resize /opt/stack/nova/nova/compute/manager.py:5954}} [ 1722.215698] env[63024]: DEBUG nova.network.neutron [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1722.248330] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526759d6-64dd-ae6f-c089-82663d51cbf2, 'name': SearchDatastore_Task, 'duration_secs': 0.080579} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.248780] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.249121] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1722.249430] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.249638] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.249921] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1722.253070] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b200a6ed-0916-46f5-a8fc-2b978343adf3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.258949] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950814, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.269927] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1722.269927] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1722.271450] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3327172a-6f05-4053-87b6-4774994e82f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.283386] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1722.283386] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52796457-609f-5aed-3f42-f17766d91e49" [ 1722.283386] env[63024]: _type = "Task" [ 1722.283386] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.293167] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52796457-609f-5aed-3f42-f17766d91e49, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.301527] env[63024]: DEBUG nova.compute.manager [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1722.305428] env[63024]: DEBUG oslo_concurrency.lockutils [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] Releasing lock "refresh_cache-df2933d1-32c3-48a6-8ceb-d5e3047d0b78" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.305428] env[63024]: DEBUG nova.compute.manager [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Received event network-changed-2edee58c-e76b-46ad-b4c8-3b2a70467c01 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1722.305428] env[63024]: DEBUG nova.compute.manager [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Refreshing instance network info cache due to event network-changed-2edee58c-e76b-46ad-b4c8-3b2a70467c01. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1722.309018] env[63024]: DEBUG oslo_concurrency.lockutils [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] Acquiring lock "refresh_cache-49eb6292-012a-4296-aff8-9c460866a602" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1722.309018] env[63024]: DEBUG oslo_concurrency.lockutils [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] Acquired lock "refresh_cache-49eb6292-012a-4296-aff8-9c460866a602" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1722.309018] env[63024]: DEBUG nova.network.neutron [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Refreshing network info cache for port 2edee58c-e76b-46ad-b4c8-3b2a70467c01 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1722.369940] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35b8e38-d1b2-443d-bafa-7d0d4075f7d1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.379336] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-295d22b0-337b-4ed1-bc70-6610c781a4a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.419531] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de223f49-f6f0-4764-8534-30746fb0ce67 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.428547] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00955b93-19f9-4ff9-b8da-cf7ecbc7a0a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.451676] env[63024]: DEBUG nova.compute.provider_tree [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1722.487891] env[63024]: DEBUG nova.network.neutron [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Updating instance_info_cache with network_info: [{"id": "bc1da74a-2cfe-4e8e-9682-78541f185723", "address": "fa:16:3e:62:94:7d", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.65", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc1da74a-2c", "ovs_interfaceid": "bc1da74a-2cfe-4e8e-9682-78541f185723", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1722.597202] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950815, 'name': Rename_Task, 'duration_secs': 0.529928} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.597927] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1722.598581] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-943dd2d3-a44b-4dae-a96c-b825172b3265 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.608748] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1722.608748] env[63024]: value = "task-1950816" [ 1722.608748] env[63024]: _type = "Task" [ 1722.608748] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.620629] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950816, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.703068] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1722.752033] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950814, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.792578] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52796457-609f-5aed-3f42-f17766d91e49, 'name': SearchDatastore_Task, 'duration_secs': 0.012232} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1722.793417] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-838f3dea-d9ca-4b72-950c-3f5cf0dcc4f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1722.800476] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1722.800476] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f446af-7b47-088f-ed2e-9e4af861b1b6" [ 1722.800476] env[63024]: _type = "Task" [ 1722.800476] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1722.811876] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f446af-7b47-088f-ed2e-9e4af861b1b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1722.955036] env[63024]: DEBUG nova.scheduler.client.report [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1722.990796] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Releasing lock "refresh_cache-3815d381-760d-40fc-98cf-8e6af287007f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1722.990796] env[63024]: DEBUG nova.compute.manager [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Instance network_info: |[{"id": "bc1da74a-2cfe-4e8e-9682-78541f185723", "address": "fa:16:3e:62:94:7d", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.65", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", 
"segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc1da74a-2c", "ovs_interfaceid": "bc1da74a-2cfe-4e8e-9682-78541f185723", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1722.990796] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:94:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc1da74a-2cfe-4e8e-9682-78541f185723', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1722.999410] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Creating folder: Project (30be8d2fefa341efb89aad6f645d79f1). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1723.000676] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5feb7e66-ce3f-4478-9880-0fd487bd6fcf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.013385] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Created folder: Project (30be8d2fefa341efb89aad6f645d79f1) in parent group-v401959. [ 1723.013567] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Creating folder: Instances. Parent ref: group-v402094. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1723.013810] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-00ee56a1-85b9-4c4b-957f-adf327d56139 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.024773] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Created folder: Instances in parent group-v402094. [ 1723.025022] env[63024]: DEBUG oslo.service.loopingcall [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1723.025737] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1723.025737] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b471414f-25ad-4b13-b327-c90bd4e212c9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.046661] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1723.046661] env[63024]: value = "task-1950819" [ 1723.046661] env[63024]: _type = "Task" [ 1723.046661] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.055638] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950819, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.121899] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950816, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.218136] env[63024]: DEBUG nova.network.neutron [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Updated VIF entry in instance network info cache for port 2edee58c-e76b-46ad-b4c8-3b2a70467c01. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1723.218555] env[63024]: DEBUG nova.network.neutron [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Updating instance_info_cache with network_info: [{"id": "2edee58c-e76b-46ad-b4c8-3b2a70467c01", "address": "fa:16:3e:1a:71:e7", "network": {"id": "c16b8a08-a304-46c8-bf65-ad5caee54acc", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-528457489-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad3f57a3e2224175812d7816ea5327fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2edee58c-e7", "ovs_interfaceid": "2edee58c-e76b-46ad-b4c8-3b2a70467c01", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1723.252607] env[63024]: DEBUG oslo_vmware.api [None 
req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950814, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.908187} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.252881] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 8edc24d6-9073-4836-b14b-422df3ac1b88/8edc24d6-9073-4836-b14b-422df3ac1b88.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1723.253149] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1723.253444] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a21493ce-31f9-4f77-85a5-f097593885a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.263579] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1723.263579] env[63024]: value = "task-1950820" [ 1723.263579] env[63024]: _type = "Task" [ 1723.263579] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.277496] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950820, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.289331] env[63024]: DEBUG nova.compute.manager [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Received event network-vif-plugged-bc1da74a-2cfe-4e8e-9682-78541f185723 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1723.289496] env[63024]: DEBUG oslo_concurrency.lockutils [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] Acquiring lock "3815d381-760d-40fc-98cf-8e6af287007f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1723.289742] env[63024]: DEBUG oslo_concurrency.lockutils [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] Lock "3815d381-760d-40fc-98cf-8e6af287007f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.289905] env[63024]: DEBUG oslo_concurrency.lockutils [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] Lock "3815d381-760d-40fc-98cf-8e6af287007f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.290113] env[63024]: DEBUG nova.compute.manager [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] No waiting events found dispatching network-vif-plugged-bc1da74a-2cfe-4e8e-9682-78541f185723 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1723.290345] env[63024]: WARNING nova.compute.manager [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Received unexpected event network-vif-plugged-bc1da74a-2cfe-4e8e-9682-78541f185723 for instance with vm_state building and task_state spawning. [ 1723.290517] env[63024]: DEBUG nova.compute.manager [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Received event network-changed-bc1da74a-2cfe-4e8e-9682-78541f185723 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1723.290690] env[63024]: DEBUG nova.compute.manager [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Refreshing instance network info cache due to event network-changed-bc1da74a-2cfe-4e8e-9682-78541f185723. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1723.290871] env[63024]: DEBUG oslo_concurrency.lockutils [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] Acquiring lock "refresh_cache-3815d381-760d-40fc-98cf-8e6af287007f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1723.291032] env[63024]: DEBUG oslo_concurrency.lockutils [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] Acquired lock "refresh_cache-3815d381-760d-40fc-98cf-8e6af287007f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1723.291211] env[63024]: DEBUG nova.network.neutron [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Refreshing network info cache for port bc1da74a-2cfe-4e8e-9682-78541f185723 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1723.313336] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f446af-7b47-088f-ed2e-9e4af861b1b6, 'name': SearchDatastore_Task, 'duration_secs': 0.079865} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.313662] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.314024] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] df2933d1-32c3-48a6-8ceb-d5e3047d0b78/df2933d1-32c3-48a6-8ceb-d5e3047d0b78.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1723.314348] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e82c6612-c6d9-468a-9df7-a55fa8b182e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.318688] env[63024]: DEBUG nova.compute.manager [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1723.329197] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1723.329197] env[63024]: value = "task-1950821" [ 1723.329197] env[63024]: _type = "Task" [ 1723.329197] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.341203] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950821, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.359269] env[63024]: DEBUG nova.virt.hardware [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1723.359821] env[63024]: DEBUG nova.virt.hardware [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1723.360772] env[63024]: DEBUG nova.virt.hardware [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1723.360772] env[63024]: DEBUG nova.virt.hardware [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1723.360772] env[63024]: DEBUG nova.virt.hardware [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1723.360772] env[63024]: DEBUG nova.virt.hardware [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1723.360970] env[63024]: DEBUG nova.virt.hardware [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1723.361167] env[63024]: DEBUG nova.virt.hardware [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 
tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1723.361360] env[63024]: DEBUG nova.virt.hardware [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1723.361538] env[63024]: DEBUG nova.virt.hardware [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1723.361713] env[63024]: DEBUG nova.virt.hardware [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1723.363556] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b503678e-c8b5-4c07-8525-da2e981909c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.372877] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d52526-3b91-4d10-bbf1-ab98eea81937 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.459022] env[63024]: DEBUG oslo_concurrency.lockutils [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.170s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1723.461818] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.213s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1723.462977] env[63024]: INFO nova.compute.claims [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1723.490343] env[63024]: INFO nova.scheduler.client.report [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Deleted allocations for instance 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3 [ 1723.560690] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950819, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.621947] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950816, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.723921] env[63024]: DEBUG oslo_concurrency.lockutils [req-c0637b81-50c0-42ef-b098-d7f895d865d2 req-fd490b62-d261-44a2-a1dd-a7d0a5f70fe8 service nova] Releasing lock "refresh_cache-49eb6292-012a-4296-aff8-9c460866a602" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1723.782029] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950820, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074416} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1723.782369] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1723.784735] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27a2787-9e4c-4feb-a83d-a4f9bb27612e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.818604] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 8edc24d6-9073-4836-b14b-422df3ac1b88/8edc24d6-9073-4836-b14b-422df3ac1b88.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1723.818604] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-050ed8af-6884-4e22-bb3e-c1f288b5ed68 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1723.847656] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950821, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.849611] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1723.849611] env[63024]: value = "task-1950822" [ 1723.849611] env[63024]: _type = "Task" [ 1723.849611] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1723.861165] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950822, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1723.901367] env[63024]: DEBUG nova.network.neutron [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Successfully updated port: f4505342-e35b-4162-a5ba-ed4d32e9ea65 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1724.006803] env[63024]: DEBUG oslo_concurrency.lockutils [None req-148b4eee-1743-4e98-81cf-df60b01cb2f8 tempest-ServerGroupTestJSON-1839349303 tempest-ServerGroupTestJSON-1839349303-project-member] Lock "94d9210e-ca8d-4ef1-a640-2d9a11ad87d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.668s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1724.062225] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950819, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.122079] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950816, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.345105] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950821, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.789929} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.345393] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] df2933d1-32c3-48a6-8ceb-d5e3047d0b78/df2933d1-32c3-48a6-8ceb-d5e3047d0b78.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1724.345705] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1724.345930] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c01f463c-a330-48db-886e-e05771601a8c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.354850] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1724.354850] env[63024]: value = "task-1950823" [ 1724.354850] env[63024]: _type = "Task" [ 1724.354850] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.361396] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950822, 'name': ReconfigVM_Task, 'duration_secs': 0.48371} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.362095] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 8edc24d6-9073-4836-b14b-422df3ac1b88/8edc24d6-9073-4836-b14b-422df3ac1b88.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1724.362718] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d456c529-bf7b-475b-afd6-44ee06c90728 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.367625] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950823, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.369711] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1724.369711] env[63024]: value = "task-1950824" [ 1724.369711] env[63024]: _type = "Task" [ 1724.369711] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.378621] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950824, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.405532] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "refresh_cache-37792b57-3347-4134-a060-53359afa3298" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.405702] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "refresh_cache-37792b57-3347-4134-a060-53359afa3298" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.405869] env[63024]: DEBUG nova.network.neutron [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1724.558179] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950819, 'name': CreateVM_Task, 'duration_secs': 1.246921} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.558380] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1724.559216] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1724.559445] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1724.559736] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1724.563252] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f480256-2156-49d0-aa4c-2aae527a59a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.569168] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Waiting for the task: (returnval){ [ 1724.569168] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525f62a5-c52c-f0b6-80f7-b20fc632b11b" [ 1724.569168] env[63024]: _type = "Task" [ 1724.569168] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.578142] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525f62a5-c52c-f0b6-80f7-b20fc632b11b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.612084] env[63024]: DEBUG nova.network.neutron [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Updated VIF entry in instance network info cache for port bc1da74a-2cfe-4e8e-9682-78541f185723. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1724.612084] env[63024]: DEBUG nova.network.neutron [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Updating instance_info_cache with network_info: [{"id": "bc1da74a-2cfe-4e8e-9682-78541f185723", "address": "fa:16:3e:62:94:7d", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.65", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc1da74a-2c", "ovs_interfaceid": "bc1da74a-2cfe-4e8e-9682-78541f185723", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1724.621262] env[63024]: DEBUG oslo_vmware.api [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950816, 'name': PowerOnVM_Task, 'duration_secs': 1.536084} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.621391] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1724.622606] env[63024]: INFO nova.compute.manager [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Took 19.68 seconds to spawn the instance on the hypervisor. [ 1724.622606] env[63024]: DEBUG nova.compute.manager [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1724.625062] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e624da-5835-4541-9afe-37f74a6f4bb3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.869612] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950823, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076405} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.869898] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1724.871123] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67680560-3af9-41b3-a691-c9fae14e47d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.885144] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950824, 'name': Rename_Task, 'duration_secs': 0.185401} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1724.894482] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1724.903282] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] df2933d1-32c3-48a6-8ceb-d5e3047d0b78/df2933d1-32c3-48a6-8ceb-d5e3047d0b78.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1724.906126] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba600a1b-a147-4d87-813c-c641256cb76e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.907860] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8b9d7ecb-debf-4d41-87c8-e462269854b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1724.933307] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1724.933307] env[63024]: value = "task-1950825" [ 1724.933307] env[63024]: _type = "Task" [ 1724.933307] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.933658] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1724.933658] env[63024]: value = "task-1950826" [ 1724.933658] env[63024]: _type = "Task" [ 1724.933658] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1724.952271] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950825, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.952381] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950826, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1724.976788] env[63024]: DEBUG nova.network.neutron [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1725.027824] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-975e7204-8cc1-4a66-b376-efa18993d17b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.037245] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38b923a8-10f8-450e-ae87-6d4a09311869 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.084261] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4720e776-393c-4dbf-8583-f62aa6270737 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.100732] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d50c58b8-0699-49fd-aae6-ab83878cb3f2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.103598] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525f62a5-c52c-f0b6-80f7-b20fc632b11b, 'name': SearchDatastore_Task, 'duration_secs': 0.027132} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.104096] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.104549] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1725.104805] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.105361] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.105361] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1725.106261] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b8aaa7fe-26ff-4402-991e-d8191c23e401 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.120230] env[63024]: DEBUG oslo_concurrency.lockutils [req-93ec428c-3bbd-49dc-acb2-9fef90931daf req-db147ff6-7259-4db7-9de9-fc42d44be2c2 service nova] Releasing lock "refresh_cache-3815d381-760d-40fc-98cf-8e6af287007f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.121052] env[63024]: DEBUG nova.compute.provider_tree [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1725.132485] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-38800399-b20d-4162-af61-7dd6818cc46c 
tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1725.132695] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1725.133487] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0154aff-9eda-4f59-9295-70243b6f8f86 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.144620] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Waiting for the task: (returnval){ [ 1725.144620] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52601dab-136c-f847-95ac-8acc89f11f56" [ 1725.144620] env[63024]: _type = "Task" [ 1725.144620] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.151669] env[63024]: INFO nova.compute.manager [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Took 65.29 seconds to build instance. [ 1725.161228] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52601dab-136c-f847-95ac-8acc89f11f56, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.232933] env[63024]: DEBUG nova.network.neutron [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Updating instance_info_cache with network_info: [{"id": "f4505342-e35b-4162-a5ba-ed4d32e9ea65", "address": "fa:16:3e:db:89:d5", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4505342-e3", "ovs_interfaceid": "f4505342-e35b-4162-a5ba-ed4d32e9ea65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1725.447799] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950825, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.451159] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950826, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.600166] env[63024]: DEBUG nova.compute.manager [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] [instance: 37792b57-3347-4134-a060-53359afa3298] Received event network-vif-plugged-f4505342-e35b-4162-a5ba-ed4d32e9ea65 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1725.600433] env[63024]: DEBUG oslo_concurrency.lockutils [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] Acquiring lock "37792b57-3347-4134-a060-53359afa3298-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1725.600665] env[63024]: DEBUG oslo_concurrency.lockutils [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] Lock "37792b57-3347-4134-a060-53359afa3298-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1725.600845] env[63024]: DEBUG oslo_concurrency.lockutils [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] Lock "37792b57-3347-4134-a060-53359afa3298-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.601072] env[63024]: DEBUG nova.compute.manager [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] [instance: 37792b57-3347-4134-a060-53359afa3298] No waiting events found dispatching network-vif-plugged-f4505342-e35b-4162-a5ba-ed4d32e9ea65 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1725.601901] env[63024]: WARNING nova.compute.manager [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] [instance: 37792b57-3347-4134-a060-53359afa3298] Received unexpected event network-vif-plugged-f4505342-e35b-4162-a5ba-ed4d32e9ea65 for instance with vm_state building and task_state spawning. [ 1725.602164] env[63024]: DEBUG nova.compute.manager [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] [instance: 37792b57-3347-4134-a060-53359afa3298] Received event network-changed-f4505342-e35b-4162-a5ba-ed4d32e9ea65 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1725.602338] env[63024]: DEBUG nova.compute.manager [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] [instance: 37792b57-3347-4134-a060-53359afa3298] Refreshing instance network info cache due to event network-changed-f4505342-e35b-4162-a5ba-ed4d32e9ea65. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1725.602521] env[63024]: DEBUG oslo_concurrency.lockutils [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] Acquiring lock "refresh_cache-37792b57-3347-4134-a060-53359afa3298" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1725.645919] env[63024]: ERROR nova.scheduler.client.report [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [req-97217697-ae1d-4994-9ee6-517ddfdda869] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-97217697-ae1d-4994-9ee6-517ddfdda869"}]} [ 1725.658261] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83503355-af6f-4a90-af74-d860ab316948 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 71.825s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1725.658529] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52601dab-136c-f847-95ac-8acc89f11f56, 'name': SearchDatastore_Task, 'duration_secs': 0.05074} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.661640] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbba2ec0-f3bd-42c0-93a9-b9708852d619 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.666762] env[63024]: DEBUG nova.scheduler.client.report [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1725.669261] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Waiting for the task: (returnval){ [ 1725.669261] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527b5080-8be7-585c-ba04-80ba1f99ea71" [ 1725.669261] env[63024]: _type = "Task" [ 1725.669261] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.685362] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527b5080-8be7-585c-ba04-80ba1f99ea71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.686391] env[63024]: DEBUG nova.scheduler.client.report [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1725.686594] env[63024]: DEBUG nova.compute.provider_tree [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1725.705614] env[63024]: DEBUG nova.scheduler.client.report [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1725.727888] env[63024]: DEBUG nova.scheduler.client.report [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1725.736046] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "refresh_cache-37792b57-3347-4134-a060-53359afa3298" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1725.736160] env[63024]: DEBUG nova.compute.manager [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 
37792b57-3347-4134-a060-53359afa3298] Instance network_info: |[{"id": "f4505342-e35b-4162-a5ba-ed4d32e9ea65", "address": "fa:16:3e:db:89:d5", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4505342-e3", "ovs_interfaceid": "f4505342-e35b-4162-a5ba-ed4d32e9ea65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1725.736380] env[63024]: DEBUG oslo_concurrency.lockutils [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] Acquired lock "refresh_cache-37792b57-3347-4134-a060-53359afa3298" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1725.736605] env[63024]: DEBUG nova.network.neutron [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] [instance: 37792b57-3347-4134-a060-53359afa3298] Refreshing network info cache for port f4505342-e35b-4162-a5ba-ed4d32e9ea65 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1725.738512] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:89:d5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4505342-e35b-4162-a5ba-ed4d32e9ea65', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1725.747465] env[63024]: DEBUG oslo.service.loopingcall [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1725.748945] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37792b57-3347-4134-a060-53359afa3298] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1725.749206] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e97e5115-c1aa-44d1-87eb-cb1dea13fa2b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.774541] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1725.774541] env[63024]: value = "task-1950827" [ 1725.774541] env[63024]: _type = "Task" [ 1725.774541] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.786788] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950827, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1725.950032] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950826, 'name': ReconfigVM_Task, 'duration_secs': 0.928597} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.954519] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Reconfigured VM instance instance-0000002a to attach disk [datastore1] df2933d1-32c3-48a6-8ceb-d5e3047d0b78/df2933d1-32c3-48a6-8ceb-d5e3047d0b78.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1725.955261] env[63024]: DEBUG oslo_vmware.api [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950825, 'name': PowerOnVM_Task, 'duration_secs': 0.53228} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1725.957946] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2e8c84d-dabc-45d8-9316-03c02c4eda2f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.959692] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1725.959903] env[63024]: INFO nova.compute.manager [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Took 10.60 seconds to spawn the instance on the hypervisor. 
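[editor's note] The records above repeatedly show the oslo.vmware request/poll cycle: a SOAP method is issued ("Invoking VirtualMachine.PowerOnVM_Task ..."), a task moref is returned ("Waiting for the task: (returnval){ value = "task-..." }"), and the task is polled until completion ("progress is N%" / "completed successfully"). The following is a minimal illustrative sketch of that call pattern using the public oslo.vmware API, not code taken from this log; the host, credentials, and the moref value 'vm-12345' are placeholders, and the exact keyword names mirror the library's documented parameters to the best of the editor's knowledge.

    # Sketch only: reproduces the invoke -> wait_for_task pattern seen in the log.
    from oslo_vmware import api, vim_util

    # Connection settings normally come from nova.conf [vmware]; these are placeholders.
    session = api.VMwareAPISession(
        'vcenter.example.com', 'user', 'password',
        api_retry_count=10, task_poll_interval=0.5)

    # Hypothetical managed object reference for an existing VM.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # invoke_api() sends the SOAP request (the "Invoking ..." DEBUG lines) and
    # returns a task reference (the "Waiting for the task" block).
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task every task_poll_interval seconds (the
    # "_poll_task ... progress is N%" lines) and returns the task info on
    # success, raising an exception if vCenter reports the task failed.
    task_info = session.wait_for_task(task)

[end editor's note]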
[ 1725.960110] env[63024]: DEBUG nova.compute.manager [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1725.961147] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3cdc246-702f-43a2-b2e9-5aa7d21809b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1725.973216] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1725.973216] env[63024]: value = "task-1950828" [ 1725.973216] env[63024]: _type = "Task" [ 1725.973216] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1725.985232] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950828, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.086411] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.086411] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.086411] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.086616] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.086863] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1726.090119] env[63024]: INFO nova.compute.manager [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Terminating instance [ 1726.163264] env[63024]: DEBUG nova.compute.manager [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1726.183605] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527b5080-8be7-585c-ba04-80ba1f99ea71, 'name': SearchDatastore_Task, 'duration_secs': 0.027191} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.183867] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1726.184141] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 3815d381-760d-40fc-98cf-8e6af287007f/3815d381-760d-40fc-98cf-8e6af287007f.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1726.184409] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8a0297c6-6d37-4440-a620-61e5f1477afa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.196020] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Waiting for the task: (returnval){ [ 1726.196020] env[63024]: value = "task-1950829" [ 1726.196020] env[63024]: _type = "Task" [ 1726.196020] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.287231] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950827, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.289077] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4247872-a51e-40a1-9d5e-e6f0c952d53e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.299836] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9750342d-b109-4f2c-936a-cd6db2f255be {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.340497] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76bfb640-dc75-435e-9e9d-da608c83450f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.350473] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13bbd5a2-e631-4d22-a19b-e699c9bc60f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.355551] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4038510-768d-49b3-bbcc-e23154a4f9ba tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.355805] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4038510-768d-49b3-bbcc-e23154a4f9ba tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1726.355976] env[63024]: DEBUG nova.compute.manager [None req-d4038510-768d-49b3-bbcc-e23154a4f9ba tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1726.356789] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077fc03e-5c72-4a5d-9f92-de35f503cbe3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.373304] env[63024]: DEBUG nova.compute.provider_tree [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1726.374776] env[63024]: DEBUG nova.compute.manager [None req-d4038510-768d-49b3-bbcc-e23154a4f9ba tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63024) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1726.375354] env[63024]: DEBUG 
nova.objects.instance [None req-d4038510-768d-49b3-bbcc-e23154a4f9ba tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lazy-loading 'flavor' on Instance uuid 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1726.483048] env[63024]: INFO nova.compute.manager [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Took 41.26 seconds to build instance. [ 1726.491269] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950828, 'name': Rename_Task, 'duration_secs': 0.233049} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.491269] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1726.491455] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b0c125fe-7481-473f-85e6-6e9e8cd3f874 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.499188] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1726.499188] env[63024]: value = "task-1950830" [ 1726.499188] env[63024]: _type = "Task" [ 1726.499188] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.509329] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950830, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.596077] env[63024]: DEBUG nova.compute.manager [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1726.596077] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1726.596287] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8f10076-64bc-4bbc-91df-eaef293ba142 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.607645] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1726.607989] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7715b8cd-9a21-4492-808d-19fbb372ae15 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.616395] env[63024]: DEBUG oslo_vmware.api [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1726.616395] env[63024]: value = "task-1950831" [ 1726.616395] env[63024]: _type = "Task" [ 1726.616395] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.625989] env[63024]: DEBUG oslo_vmware.api [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950831, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.696573] env[63024]: DEBUG oslo_concurrency.lockutils [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1726.712900] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950829, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.793029] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950827, 'name': CreateVM_Task, 'duration_secs': 0.525637} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1726.793029] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 37792b57-3347-4134-a060-53359afa3298] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1726.793029] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1726.793029] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1726.793029] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1726.793029] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6b79594-6004-4dd4-b3c4-b38610438ad7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1726.802515] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1726.802515] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5236e0b4-3e7b-766f-4203-a8e27f35a4b7" [ 1726.802515] env[63024]: _type = "Task" [ 1726.802515] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1726.816234] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5236e0b4-3e7b-766f-4203-a8e27f35a4b7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1726.879650] env[63024]: DEBUG nova.scheduler.client.report [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1726.888497] env[63024]: DEBUG nova.network.neutron [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] [instance: 37792b57-3347-4134-a060-53359afa3298] Updated VIF entry in instance network info cache for port f4505342-e35b-4162-a5ba-ed4d32e9ea65. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1726.888497] env[63024]: DEBUG nova.network.neutron [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] [instance: 37792b57-3347-4134-a060-53359afa3298] Updating instance_info_cache with network_info: [{"id": "f4505342-e35b-4162-a5ba-ed4d32e9ea65", "address": "fa:16:3e:db:89:d5", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4505342-e3", "ovs_interfaceid": "f4505342-e35b-4162-a5ba-ed4d32e9ea65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1726.993287] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d760b9fa-5b19-40d5-856a-2b690c2812d9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "8edc24d6-9073-4836-b14b-422df3ac1b88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.780s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.015941] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950830, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.130949] env[63024]: DEBUG oslo_vmware.api [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950831, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.208843] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950829, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.713729} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.209131] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 3815d381-760d-40fc-98cf-8e6af287007f/3815d381-760d-40fc-98cf-8e6af287007f.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1727.209350] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1727.209616] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-73541359-fc13-4396-8fcb-b08489b44f26 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.217667] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Waiting for the task: (returnval){ [ 1727.217667] env[63024]: value = "task-1950832" [ 1727.217667] env[63024]: _type = "Task" [ 1727.217667] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.230242] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950832, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.313336] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5236e0b4-3e7b-766f-4203-a8e27f35a4b7, 'name': SearchDatastore_Task, 'duration_secs': 0.054402} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.313697] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.313928] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1727.314186] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1727.314341] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1727.314517] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1727.314794] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a932da9-5146-4176-9127-43be6b8c1f52 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.324586] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1727.324765] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1727.325508] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37a3e52b-9f63-412f-9ff0-151bb4165cf5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.332404] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1727.332404] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e34531-d5f5-c264-ff1a-f991efda96d5" [ 1727.332404] env[63024]: _type = "Task" [ 1727.332404] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.341494] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e34531-d5f5-c264-ff1a-f991efda96d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.387879] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.926s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1727.388417] env[63024]: DEBUG nova.compute.manager [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1727.397108] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.836s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1727.397108] env[63024]: DEBUG nova.objects.instance [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lazy-loading 'resources' on Instance uuid bd07735a-6a75-45fb-9cef-e1f2c301a489 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1727.397108] env[63024]: DEBUG oslo_concurrency.lockutils [req-eb32de31-1e18-4648-82f1-22bbaa074ae7 req-6acb0f61-cb40-4a72-b453-a6dd4523cd3a service nova] Releasing lock "refresh_cache-37792b57-3347-4134-a060-53359afa3298" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1727.397108] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4038510-768d-49b3-bbcc-e23154a4f9ba tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1727.397858] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a195e072-1d16-4609-a763-82bf80363723 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.408218] env[63024]: DEBUG oslo_vmware.api [None req-d4038510-768d-49b3-bbcc-e23154a4f9ba tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1727.408218] env[63024]: value = "task-1950833" [ 1727.408218] env[63024]: _type = "Task" [ 1727.408218] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.425479] env[63024]: DEBUG oslo_vmware.api [None req-d4038510-768d-49b3-bbcc-e23154a4f9ba tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950833, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.495432] env[63024]: DEBUG nova.compute.manager [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1727.515351] env[63024]: DEBUG oslo_vmware.api [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950830, 'name': PowerOnVM_Task, 'duration_secs': 0.964076} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.515351] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1727.515351] env[63024]: INFO nova.compute.manager [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Took 9.53 seconds to spawn the instance on the hypervisor. [ 1727.515351] env[63024]: DEBUG nova.compute.manager [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1727.515351] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a8480e-1a4f-40b4-add2-6f7f7d30acbb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.629467] env[63024]: DEBUG oslo_vmware.api [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950831, 'name': PowerOffVM_Task, 'duration_secs': 0.62189} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.629764] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1727.629981] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1727.630401] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a4d77ca8-f852-4698-ba34-63e7a5828fe8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.730843] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950832, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069074} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.731431] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1727.732322] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7ada49-9c8a-43ac-b438-557c6328a037 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.759539] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 3815d381-760d-40fc-98cf-8e6af287007f/3815d381-760d-40fc-98cf-8e6af287007f.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1727.759539] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df8eb39c-0904-4366-bfe1-ddd559476869 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.786438] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Waiting for the task: (returnval){ [ 1727.786438] env[63024]: value = "task-1950835" [ 1727.786438] env[63024]: _type = "Task" [ 1727.786438] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.795212] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950835, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.844514] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e34531-d5f5-c264-ff1a-f991efda96d5, 'name': SearchDatastore_Task, 'duration_secs': 0.015158} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.846288] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-34ff7efd-63e2-4a54-bd28-28887eb21b9f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.851949] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1727.851949] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52085db1-80e1-c30b-76cb-c7820511de39" [ 1727.851949] env[63024]: _type = "Task" [ 1727.851949] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.861577] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52085db1-80e1-c30b-76cb-c7820511de39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.893618] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1727.893618] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1727.893618] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Deleting the datastore file [datastore1] f6fddc23-ad36-4d6f-82a2-ded456b2596e {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1727.893618] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-15f4503e-30d0-4bdf-ad24-338550cb7302 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1727.903184] env[63024]: DEBUG nova.compute.utils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1727.905481] env[63024]: DEBUG nova.compute.manager [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1727.905664] env[63024]: DEBUG nova.network.neutron [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1727.908696] env[63024]: DEBUG oslo_vmware.api [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1727.908696] env[63024]: value = "task-1950836" [ 1727.908696] env[63024]: _type = "Task" [ 1727.908696] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1727.927563] env[63024]: DEBUG oslo_vmware.api [None req-d4038510-768d-49b3-bbcc-e23154a4f9ba tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950833, 'name': PowerOffVM_Task, 'duration_secs': 0.236866} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1727.930867] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4038510-768d-49b3-bbcc-e23154a4f9ba tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1727.930983] env[63024]: DEBUG nova.compute.manager [None req-d4038510-768d-49b3-bbcc-e23154a4f9ba tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1727.931571] env[63024]: DEBUG oslo_vmware.api [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950836, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1727.933032] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4505acf1-8cf8-4f1a-9ab5-1e5b9dd21af4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.022752] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1728.047570] env[63024]: DEBUG nova.policy [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a59445f732e4801b5e6020b488adb59', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5577b40f56af44eebd47761192e9510f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1728.052876] env[63024]: INFO nova.compute.manager [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Took 41.47 seconds to build instance. [ 1728.298872] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950835, 'name': ReconfigVM_Task, 'duration_secs': 0.437025} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.302237] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 3815d381-760d-40fc-98cf-8e6af287007f/3815d381-760d-40fc-98cf-8e6af287007f.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1728.303243] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c1400947-5e0b-4a10-8a47-193e27587220 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.315858] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Waiting for the task: (returnval){ [ 1728.315858] env[63024]: value = "task-1950837" [ 1728.315858] env[63024]: _type = "Task" [ 1728.315858] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.328362] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950837, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.373366] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52085db1-80e1-c30b-76cb-c7820511de39, 'name': SearchDatastore_Task, 'duration_secs': 0.034466} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.373939] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1728.374239] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 37792b57-3347-4134-a060-53359afa3298/37792b57-3347-4134-a060-53359afa3298.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1728.374489] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ecccb645-ed9a-401e-9578-923ef12fdc32 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.389722] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1728.389722] env[63024]: value = "task-1950838" [ 1728.389722] env[63024]: _type = "Task" [ 1728.389722] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.402622] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950838, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.410814] env[63024]: DEBUG nova.compute.manager [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1728.423372] env[63024]: DEBUG oslo_vmware.api [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1950836, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24263} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.427135] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1728.427135] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1728.428173] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1728.428173] env[63024]: INFO nova.compute.manager [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Took 1.83 seconds to destroy the instance on the hypervisor. [ 1728.428173] env[63024]: DEBUG oslo.service.loopingcall [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1728.428349] env[63024]: DEBUG nova.compute.manager [-] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1728.428349] env[63024]: DEBUG nova.network.neutron [-] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1728.452272] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4038510-768d-49b3-bbcc-e23154a4f9ba tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.095s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.556838] env[63024]: DEBUG oslo_concurrency.lockutils [None req-96b0bdb0-0f70-4c03-81fb-6443b1043af3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.991s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1728.571021] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7f34ec-b0de-4aa5-ab89-96fb1a702a15 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.581922] env[63024]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-968fcbd5-124e-4787-a64f-1e09aef8ef11 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.616697] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfdb429-ec6c-495c-8f84-c0ab77a90571 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.626468] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21aa9df1-59e7-4c43-bc0c-fb99a5b77ff5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.641515] env[63024]: DEBUG nova.compute.provider_tree [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1728.833851] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950837, 'name': Rename_Task, 'duration_secs': 0.233523} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1728.838170] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1728.838491] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58bfca56-fa17-4ef2-beda-7063d97b7a97 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.849877] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Waiting for the task: (returnval){ [ 1728.849877] env[63024]: value = "task-1950839" [ 1728.849877] env[63024]: _type = "Task" [ 1728.849877] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1728.866211] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950839, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1728.903147] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950838, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.145554] env[63024]: DEBUG nova.scheduler.client.report [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1729.337592] env[63024]: DEBUG nova.network.neutron [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Successfully created port: c6c42da9-f98c-4f7e-94e7-39d45bc8f882 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1729.364741] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950839, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.401581] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950838, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.904873} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.402109] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 37792b57-3347-4134-a060-53359afa3298/37792b57-3347-4134-a060-53359afa3298.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1729.402369] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1729.402800] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-66bb4720-0e48-4c6b-8568-c58b684fc77e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.412171] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1729.412171] env[63024]: value = "task-1950840" [ 1729.412171] env[63024]: _type = "Task" [ 1729.412171] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.425146] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950840, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1729.427910] env[63024]: DEBUG nova.compute.manager [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1729.458817] env[63024]: DEBUG nova.virt.hardware [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1729.459498] env[63024]: DEBUG nova.virt.hardware [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1729.459716] env[63024]: DEBUG nova.virt.hardware [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1729.459907] env[63024]: DEBUG nova.virt.hardware [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1729.460073] env[63024]: DEBUG nova.virt.hardware [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1729.460261] env[63024]: DEBUG nova.virt.hardware [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1729.460473] env[63024]: DEBUG nova.virt.hardware [None 
req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1729.460650] env[63024]: DEBUG nova.virt.hardware [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1729.460833] env[63024]: DEBUG nova.virt.hardware [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1729.461015] env[63024]: DEBUG nova.virt.hardware [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1729.461204] env[63024]: DEBUG nova.virt.hardware [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1729.464046] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b6e68b-cd98-4b9f-8552-03797bc69f0c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.473250] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cdccfac-29cd-42c2-9609-81ba0881e7b5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.651393] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.258s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1729.655603] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.859s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1729.657672] env[63024]: INFO nova.compute.claims [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1729.678619] env[63024]: INFO nova.scheduler.client.report [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced 
tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Deleted allocations for instance bd07735a-6a75-45fb-9cef-e1f2c301a489 [ 1729.807329] env[63024]: DEBUG nova.network.neutron [-] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1729.862808] env[63024]: DEBUG oslo_vmware.api [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950839, 'name': PowerOnVM_Task, 'duration_secs': 0.941571} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.864439] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1729.864699] env[63024]: INFO nova.compute.manager [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Took 9.24 seconds to spawn the instance on the hypervisor. [ 1729.864882] env[63024]: DEBUG nova.compute.manager [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1729.865942] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51636b90-ff71-45ec-99b4-31302885153d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.926644] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950840, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071262} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1729.927172] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1729.928979] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5689278d-1bf6-4f04-a4ca-e32edf812500 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.959537] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] 37792b57-3347-4134-a060-53359afa3298/37792b57-3347-4134-a060-53359afa3298.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1729.960582] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-95a41e8e-b606-4838-9d7d-bebe16ca48e9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1729.983026] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1729.983026] env[63024]: value = "task-1950841" [ 1729.983026] env[63024]: _type = "Task" [ 1729.983026] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1729.993322] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950841, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.080825] env[63024]: DEBUG nova.compute.manager [req-3e7966b8-895b-4a1e-98ef-6198dea63f9b req-783f5ca7-123c-4d02-8b0e-d979e32f1101 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Received event network-vif-deleted-52fd1b1a-cd75-4bd3-967c-b98213510df1 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1730.191028] env[63024]: DEBUG oslo_concurrency.lockutils [None req-19e713c2-b35b-4f4c-874b-38e9317a2ced tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "bd07735a-6a75-45fb-9cef-e1f2c301a489" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.809s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1730.313675] env[63024]: INFO nova.compute.manager [-] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Took 1.89 seconds to deallocate network for instance. 
[ 1730.331131] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.331401] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1730.389929] env[63024]: INFO nova.compute.manager [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Took 43.43 seconds to build instance. [ 1730.494655] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950841, 'name': ReconfigVM_Task, 'duration_secs': 0.483731} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1730.495089] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Reconfigured VM instance instance-0000002c to attach disk [datastore1] 37792b57-3347-4134-a060-53359afa3298/37792b57-3347-4134-a060-53359afa3298.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1730.495839] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-32ce1dfa-ba41-4f7f-b13a-1cac22f72349 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1730.505072] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1730.505072] env[63024]: value = "task-1950842" [ 1730.505072] env[63024]: _type = "Task" [ 1730.505072] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1730.515584] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950842, 'name': Rename_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1730.823696] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1730.836686] env[63024]: DEBUG nova.compute.utils [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1730.892433] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38800399-b20d-4162-af61-7dd6818cc46c tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Lock "3815d381-760d-40fc-98cf-8e6af287007f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 44.940s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.018157] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950842, 'name': Rename_Task, 'duration_secs': 0.150315} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.018489] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1731.018961] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-58b96367-8c69-49ed-b119-22cc395e36f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.025606] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1731.025606] env[63024]: value = "task-1950843" [ 1731.025606] env[63024]: _type = "Task" [ 1731.025606] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1731.035463] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950843, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1731.081288] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "e2086b87-ae9c-4968-a847-ac91e5345ec8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.081288] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "e2086b87-ae9c-4968-a847-ac91e5345ec8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.157527] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3216a29e-7337-4c59-81c7-9bc2fdcf655b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.170381] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2297a9be-0031-4e7a-b9e5-232f94b5ac8d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.205304] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-767c42c7-8fd4-483b-a6d4-78ae22685a3f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.214932] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf303d8-5916-447d-a8a3-8f4a89aeb671 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.231351] env[63024]: DEBUG nova.compute.provider_tree [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1731.341418] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.383598] env[63024]: DEBUG nova.objects.instance [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lazy-loading 'flavor' on Instance uuid 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1731.396871] env[63024]: DEBUG nova.network.neutron [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] 
Successfully updated port: c6c42da9-f98c-4f7e-94e7-39d45bc8f882 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1731.538684] env[63024]: DEBUG oslo_vmware.api [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950843, 'name': PowerOnVM_Task, 'duration_secs': 0.498313} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1731.539047] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1731.539339] env[63024]: INFO nova.compute.manager [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Took 8.22 seconds to spawn the instance on the hypervisor. [ 1731.539610] env[63024]: DEBUG nova.compute.manager [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1731.540762] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-469221ec-e270-4790-bb49-624cfa474e1d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.587143] env[63024]: DEBUG nova.compute.manager [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1731.607582] env[63024]: INFO nova.compute.manager [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Rebuilding instance [ 1731.667281] env[63024]: DEBUG oslo_concurrency.lockutils [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "b765b8b3-a099-4e23-be30-d1178ecffc37" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.668312] env[63024]: DEBUG oslo_concurrency.lockutils [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "b765b8b3-a099-4e23-be30-d1178ecffc37" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.668312] env[63024]: DEBUG oslo_concurrency.lockutils [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "b765b8b3-a099-4e23-be30-d1178ecffc37-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1731.668312] env[63024]: DEBUG oslo_concurrency.lockutils [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "b765b8b3-a099-4e23-be30-d1178ecffc37-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1731.668312] env[63024]: DEBUG oslo_concurrency.lockutils [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "b765b8b3-a099-4e23-be30-d1178ecffc37-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1731.670028] env[63024]: DEBUG nova.compute.manager [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1731.670163] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca139668-5f50-428e-9f29-7e05f18cb547 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1731.673216] env[63024]: INFO nova.compute.manager [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] 
Terminating instance [ 1731.735177] env[63024]: DEBUG nova.scheduler.client.report [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1731.888157] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.888696] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.888696] env[63024]: DEBUG nova.network.neutron [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1731.888696] env[63024]: DEBUG nova.objects.instance [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lazy-loading 'info_cache' on Instance uuid 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1731.899310] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "refresh_cache-c1fd4146-6dd3-49e9-a744-466e6168e158" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1731.899762] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "refresh_cache-c1fd4146-6dd3-49e9-a744-466e6168e158" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1731.899954] env[63024]: DEBUG nova.network.neutron [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1732.067124] env[63024]: INFO nova.compute.manager [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 
37792b57-3347-4134-a060-53359afa3298] Took 44.06 seconds to build instance. [ 1732.108085] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.177347] env[63024]: DEBUG nova.compute.manager [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1732.177689] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1732.179475] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26595b0b-e318-41f2-8acc-4fca880275a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.189109] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1732.189388] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-46d5c279-50d5-455b-900b-d62609f5ee6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.198921] env[63024]: DEBUG oslo_vmware.api [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1732.198921] env[63024]: value = "task-1950844" [ 1732.198921] env[63024]: _type = "Task" [ 1732.198921] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.210602] env[63024]: DEBUG oslo_vmware.api [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950844, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.242501] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.243469] env[63024]: DEBUG nova.compute.manager [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1732.248408] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.192s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.248408] env[63024]: DEBUG nova.objects.instance [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Lazy-loading 'resources' on Instance uuid 61fdfa06-cb40-44a3-8abc-428b26bd40f5 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1732.259374] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquiring lock "839776ef-0562-424d-b301-2aa896f32e14" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.259764] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lock "839776ef-0562-424d-b301-2aa896f32e14" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.397988] env[63024]: DEBUG nova.objects.base [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Object Instance<9716d592-32d1-4f1d-b42b-1c8a7d81d2f2> lazy-loaded attributes: flavor,info_cache {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1732.425984] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.426317] env[63024]: DEBUG oslo_concurrency.lockutils [None 
req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.426582] env[63024]: INFO nova.compute.manager [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Attaching volume eb2fb348-971e-4d2b-9510-bd374d5998e0 to /dev/sdb [ 1732.455765] env[63024]: DEBUG nova.network.neutron [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1732.458809] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "interface-9cf45c3a-2a74-4f8e-8817-47bbd748a44b-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1732.459094] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-9cf45c3a-2a74-4f8e-8817-47bbd748a44b-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1732.459469] env[63024]: DEBUG nova.objects.instance [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'flavor' on Instance uuid 9cf45c3a-2a74-4f8e-8817-47bbd748a44b {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1732.480086] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec643fb4-7cbb-41f7-a7cf-bae272f82185 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.492619] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6af66b-d2e9-48c8-ade0-414ef992acf1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.508324] env[63024]: DEBUG nova.virt.block_device [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Updating existing volume attachment record: 734f324f-8674-4c2e-9527-d4d2bddc15b8 {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1732.569261] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d06c8933-234b-4cbe-8b34-100c287bb4e3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock 
"37792b57-3347-4134-a060-53359afa3298" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.578s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1732.688286] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1732.688604] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d1c08f2-b71a-4b26-b39f-007df0178e99 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.706048] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1732.706048] env[63024]: value = "task-1950846" [ 1732.706048] env[63024]: _type = "Task" [ 1732.706048] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.715229] env[63024]: DEBUG oslo_vmware.api [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950844, 'name': PowerOffVM_Task, 'duration_secs': 0.278323} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1732.718133] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1732.718354] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1732.719460] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950846, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1732.719818] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d452f79-cd22-4da9-8ff7-a01a83191581 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.723366] env[63024]: DEBUG nova.network.neutron [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Updating instance_info_cache with network_info: [{"id": "c6c42da9-f98c-4f7e-94e7-39d45bc8f882", "address": "fa:16:3e:3f:14:c4", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6c42da9-f9", "ovs_interfaceid": "c6c42da9-f98c-4f7e-94e7-39d45bc8f882", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1732.749704] env[63024]: DEBUG nova.compute.utils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1732.755069] env[63024]: DEBUG nova.compute.manager [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1732.755069] env[63024]: DEBUG nova.network.neutron [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1732.764580] env[63024]: DEBUG nova.compute.manager [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1732.816520] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1732.816583] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1732.816797] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Deleting the datastore file [datastore1] b765b8b3-a099-4e23-be30-d1178ecffc37 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1732.819518] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eaffd1c4-f427-4db6-8c07-bc5fc5f3ac55 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1732.822536] env[63024]: DEBUG nova.policy [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a59445f732e4801b5e6020b488adb59', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5577b40f56af44eebd47761192e9510f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1732.829978] env[63024]: DEBUG oslo_vmware.api [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for the task: (returnval){ [ 1732.829978] env[63024]: value = "task-1950848" [ 1732.829978] env[63024]: _type = "Task" [ 1732.829978] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1732.844329] env[63024]: DEBUG oslo_vmware.api [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950848, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.002225] env[63024]: DEBUG nova.compute.manager [req-0b941471-43f1-450b-a038-02b61e49ba85 req-1515898f-aab4-4fab-ba84-7b1b395c14b3 service nova] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Received event network-vif-plugged-c6c42da9-f98c-4f7e-94e7-39d45bc8f882 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1733.002225] env[63024]: DEBUG oslo_concurrency.lockutils [req-0b941471-43f1-450b-a038-02b61e49ba85 req-1515898f-aab4-4fab-ba84-7b1b395c14b3 service nova] Acquiring lock "c1fd4146-6dd3-49e9-a744-466e6168e158-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.006261] env[63024]: DEBUG oslo_concurrency.lockutils [req-0b941471-43f1-450b-a038-02b61e49ba85 req-1515898f-aab4-4fab-ba84-7b1b395c14b3 service nova] Lock "c1fd4146-6dd3-49e9-a744-466e6168e158-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1733.006261] env[63024]: DEBUG oslo_concurrency.lockutils [req-0b941471-43f1-450b-a038-02b61e49ba85 req-1515898f-aab4-4fab-ba84-7b1b395c14b3 service nova] Lock "c1fd4146-6dd3-49e9-a744-466e6168e158-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1733.006261] env[63024]: DEBUG nova.compute.manager [req-0b941471-43f1-450b-a038-02b61e49ba85 req-1515898f-aab4-4fab-ba84-7b1b395c14b3 service nova] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] No waiting events found dispatching network-vif-plugged-c6c42da9-f98c-4f7e-94e7-39d45bc8f882 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1733.006261] env[63024]: WARNING nova.compute.manager [req-0b941471-43f1-450b-a038-02b61e49ba85 req-1515898f-aab4-4fab-ba84-7b1b395c14b3 service nova] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Received unexpected event network-vif-plugged-c6c42da9-f98c-4f7e-94e7-39d45bc8f882 for instance with vm_state building and task_state spawning. 
[ 1733.145074] env[63024]: DEBUG nova.objects.instance [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'pci_requests' on Instance uuid 9cf45c3a-2a74-4f8e-8817-47bbd748a44b {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1733.178126] env[63024]: DEBUG nova.network.neutron [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance_info_cache with network_info: [{"id": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "address": "fa:16:3e:2b:cc:65", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e0e9732-b3", "ovs_interfaceid": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1733.227702] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "refresh_cache-c1fd4146-6dd3-49e9-a744-466e6168e158" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1733.228112] env[63024]: DEBUG nova.compute.manager [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Instance network_info: |[{"id": "c6c42da9-f98c-4f7e-94e7-39d45bc8f882", "address": "fa:16:3e:3f:14:c4", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6c42da9-f9", "ovs_interfaceid": "c6c42da9-f98c-4f7e-94e7-39d45bc8f882", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1733.228353] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950846, 'name': PowerOffVM_Task, 'duration_secs': 0.241568} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.231107] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:14:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6c42da9-f98c-4f7e-94e7-39d45bc8f882', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1733.239131] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Creating folder: Project (5577b40f56af44eebd47761192e9510f). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1733.239456] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1733.239690] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1733.241556] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59615cc4-998f-466d-8142-19d403555445 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.243421] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3474c058-dcb9-4109-8d8b-d285436f322a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.255204] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1733.256132] env[63024]: DEBUG nova.compute.manager [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 
tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1733.269169] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-632ac5ab-4346-417e-836c-efa94fe32e09 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.275490] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Created folder: Project (5577b40f56af44eebd47761192e9510f) in parent group-v401959. [ 1733.275490] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Creating folder: Instances. Parent ref: group-v402100. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1733.279023] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a5894a0-ec63-437f-81e5-283d5f7e1397 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.295290] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Created folder: Instances in parent group-v402100. [ 1733.295592] env[63024]: DEBUG oslo.service.loopingcall [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1733.295801] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1733.296023] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-45c46aef-9927-4df8-8e5b-5479be594415 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.316865] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1733.324232] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1733.324232] env[63024]: value = "task-1950854" [ 1733.324232] env[63024]: _type = "Task" [ 1733.324232] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.333190] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950854, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.345937] env[63024]: DEBUG oslo_vmware.api [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Task: {'id': task-1950848, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289605} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.348031] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1733.348384] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1733.348692] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1733.349200] env[63024]: INFO nova.compute.manager [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1733.349289] env[63024]: DEBUG oslo.service.loopingcall [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1733.350412] env[63024]: DEBUG nova.network.neutron [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Successfully created port: 5dca7a03-21d2-431e-95cc-a6baa1929b65 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1733.353035] env[63024]: DEBUG nova.compute.manager [-] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1733.353172] env[63024]: DEBUG nova.network.neutron [-] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1733.378558] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1733.378848] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1733.379099] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleting the datastore file [datastore1] 8edc24d6-9073-4836-b14b-422df3ac1b88 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1733.379743] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8441024-ed99-4334-9832-9b4cce700882 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.387562] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1733.387562] env[63024]: value = "task-1950855" [ 1733.387562] env[63024]: _type = "Task" [ 1733.387562] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.394973] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e019400e-dc01-4c85-8d81-e26d94496a7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.410141] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22e77c7-1533-4dbc-92a3-c90bb1fd85d5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.414627] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950855, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.449725] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a7c8de-4b00-468f-8e38-5e1947c2222f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.461794] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc1d1269-d250-4ddb-95f4-1a6ff47a7219 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.476751] env[63024]: DEBUG nova.compute.provider_tree [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1733.654018] env[63024]: DEBUG nova.objects.base [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Object Instance<9cf45c3a-2a74-4f8e-8817-47bbd748a44b> lazy-loaded attributes: flavor,pci_requests {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1733.654018] env[63024]: DEBUG nova.network.neutron [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1733.681822] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1733.717551] env[63024]: DEBUG nova.policy [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fc84a6eed984429b26a693ce7b0876e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9521048e807c4ca2a6d2e74a72b829a3', 'project_domain_id': 'default', 
'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1733.836382] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950854, 'name': CreateVM_Task, 'duration_secs': 0.502983} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.836663] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1733.837523] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1733.837805] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1733.838221] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1733.838589] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31c17fe0-29f5-4b3a-bf44-f090156bd58c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1733.847022] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1733.847022] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5237f1f6-c463-e0e5-e773-2cf4b8d17bdb" [ 1733.847022] env[63024]: _type = "Task" [ 1733.847022] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1733.857383] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5237f1f6-c463-e0e5-e773-2cf4b8d17bdb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1733.897944] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.197805} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1733.898251] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1733.898450] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1733.898628] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1733.981339] env[63024]: DEBUG nova.scheduler.client.report [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1733.989915] env[63024]: DEBUG nova.compute.manager [req-bace2b4b-1806-4537-9279-ce51d2cb44ac req-c8ef7c7d-168a-446b-8051-8d750f186773 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Received event network-vif-deleted-e82533e3-2173-4dc5-911c-829fa32117ad {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1733.990034] env[63024]: DEBUG nova.compute.manager [req-bace2b4b-1806-4537-9279-ce51d2cb44ac req-c8ef7c7d-168a-446b-8051-8d750f186773 service nova] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Received event network-vif-deleted-7f7d9f32-5f3e-4ba0-afc0-270579e87766 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1734.077677] env[63024]: DEBUG nova.network.neutron [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Successfully created port: e04e8bec-aaf6-4150-8aa3-16baf5d05b05 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1734.207992] env[63024]: DEBUG nova.network.neutron [-] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1734.286266] env[63024]: DEBUG nova.compute.manager [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Start spawning the 
instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1734.314652] env[63024]: DEBUG nova.virt.hardware [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1734.320455] env[63024]: DEBUG nova.virt.hardware [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1734.320455] env[63024]: DEBUG nova.virt.hardware [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1734.320455] env[63024]: DEBUG nova.virt.hardware [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1734.320455] env[63024]: DEBUG nova.virt.hardware [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1734.320455] env[63024]: DEBUG nova.virt.hardware [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1734.320455] env[63024]: DEBUG nova.virt.hardware [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1734.320455] env[63024]: DEBUG nova.virt.hardware [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1734.320455] env[63024]: DEBUG nova.virt.hardware 
[None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1734.320455] env[63024]: DEBUG nova.virt.hardware [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1734.320455] env[63024]: DEBUG nova.virt.hardware [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1734.320455] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c42daa-7e28-4172-98ab-bc0fe2759bee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.330645] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e27738-1c73-4012-8703-9c5ee55237b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.366450] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5237f1f6-c463-e0e5-e773-2cf4b8d17bdb, 'name': SearchDatastore_Task, 'duration_secs': 0.027507} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.366450] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1734.366450] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1734.366450] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1734.366450] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1734.366450] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1734.366450] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8a15daeb-fa6a-48c8-ad02-3c7e049618bb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.377023] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1734.377023] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1734.377023] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02a7e6d6-645e-4076-b91b-722f20b557b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.382782] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1734.382782] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5202b51c-6581-13d4-55e0-86ba49f8e803" [ 1734.382782] env[63024]: _type = "Task" [ 1734.382782] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.391712] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5202b51c-6581-13d4-55e0-86ba49f8e803, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.489019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.240s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.489019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 32.061s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.489019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1734.489019] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1734.490037] env[63024]: DEBUG oslo_concurrency.lockutils [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.996s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1734.490920] env[63024]: DEBUG nova.objects.instance [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lazy-loading 'resources' on Instance uuid ccd80e20-9fc2-415a-a428-fcf85994c7f8 {{(pid=63024) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1734.494018] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b1be3a0-10d7-4c36-9b19-ce678339c5c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.507495] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4326eeca-e554-4e61-9ddb-30e78d2e2e32 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.512390] env[63024]: INFO nova.scheduler.client.report [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Deleted allocations for instance 61fdfa06-cb40-44a3-8abc-428b26bd40f5 [ 1734.533787] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-681123a8-201c-4984-a8d6-fcd7f8ef7d64 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.543423] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434df24d-a7b8-4ceb-bb58-04a9201d1521 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.578974] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178877MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1734.579252] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1734.689961] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1734.690411] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ee9dfca-4690-44c2-a051-cb75657b7230 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.700451] env[63024]: DEBUG oslo_vmware.api [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1734.700451] env[63024]: value = "task-1950856" [ 1734.700451] env[63024]: _type = "Task" [ 1734.700451] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.709601] env[63024]: INFO nova.compute.manager [-] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Took 1.36 seconds to deallocate network for instance. 
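[annotation] The entries above and below repeat two patterns that run through this whole trace: a named lock is acquired and released with its waited/held durations logged (oslo_concurrency.lockutils), and a vCenter task is polled until it reports completion, with the progress percentages seen here (oslo_vmware.api wait_for_task / _poll_task). The snippet below is a minimal, illustrative reconstruction of the polling loop only, written against the standard library; Task and get_task_progress are hypothetical stand-ins for the real oslo.vmware API and server round-trip, not the actual implementation.

    import time
    import logging
    from dataclasses import dataclass

    LOG = logging.getLogger(__name__)

    @dataclass
    class Task:
        """Hypothetical stand-in for a vCenter task handle (e.g. 'task-1950856')."""
        task_id: str
        name: str

    def get_task_progress(task: Task) -> int:
        """Hypothetical stand-in: ask the server for task progress (0-100).

        In the real service this is a PropertyCollector round-trip; here we
        simply simulate a task that finishes after a few polls.
        """
        get_task_progress.calls = getattr(get_task_progress, "calls", 0) + 1
        return min(100, get_task_progress.calls * 33 + 1)

    def wait_for_task(task: Task, interval: float = 0.5, timeout: float = 300.0) -> None:
        """Poll a task until it completes, logging progress like the trace above."""
        start = time.monotonic()
        LOG.debug("Waiting for the task: %s (%s) to complete.", task.task_id, task.name)
        while True:
            progress = get_task_progress(task)
            if progress >= 100:
                LOG.debug("Task %s completed successfully after %.3fs.",
                          task.task_id, time.monotonic() - start)
                return
            LOG.debug("Task %s, name: %s, progress is %d%%.",
                      task.task_id, task.name, progress)
            if time.monotonic() - start > timeout:
                raise TimeoutError(f"Task {task.task_id} did not finish in {timeout}s")
            time.sleep(interval)

    if __name__ == "__main__":
        logging.basicConfig(level=logging.DEBUG)
        wait_for_task(Task("task-1950856", "PowerOnVM_Task"), interval=0.1)

[annotation] The actual service does the same thing asynchronously inside a looping call, which is why progress lines for several tasks (PowerOnVM_Task, CreateVM_Task, CopyVirtualDisk_Task, SearchDatastore_Task) interleave in the log below.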
[ 1734.725532] env[63024]: DEBUG oslo_vmware.api [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950856, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.896657] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5202b51c-6581-13d4-55e0-86ba49f8e803, 'name': SearchDatastore_Task, 'duration_secs': 0.033837} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1734.896657] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a0b01c85-64f4-4355-ada3-2362f9d76257 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.905496] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1734.905496] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52be5718-5d9e-ddb2-6a1a-2da8256df641" [ 1734.905496] env[63024]: _type = "Task" [ 1734.905496] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1734.919237] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52be5718-5d9e-ddb2-6a1a-2da8256df641, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1734.938069] env[63024]: DEBUG nova.virt.hardware [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1734.938069] env[63024]: DEBUG nova.virt.hardware [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1734.938069] env[63024]: DEBUG nova.virt.hardware [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1734.938069] env[63024]: DEBUG nova.virt.hardware [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1734.938069] env[63024]: DEBUG nova.virt.hardware [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1734.938069] env[63024]: DEBUG nova.virt.hardware [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1734.938069] env[63024]: DEBUG nova.virt.hardware [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1734.938069] env[63024]: DEBUG nova.virt.hardware [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1734.938069] env[63024]: DEBUG 
nova.virt.hardware [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1734.938069] env[63024]: DEBUG nova.virt.hardware [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1734.938069] env[63024]: DEBUG nova.virt.hardware [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1734.938672] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3b6cfbc-6302-42be-b9fc-ee79ed25adfb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.946994] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad8bd8da-24ce-405c-a9f6-4c2701f0f81b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1734.969682] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:08:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1734.978732] env[63024]: DEBUG oslo.service.loopingcall [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1734.978732] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1734.979111] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-65ab140a-b597-47f6-aed9-ded750131a42 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.001578] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1735.001578] env[63024]: value = "task-1950858" [ 1735.001578] env[63024]: _type = "Task" [ 1735.001578] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.013050] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950858, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.023516] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05840052-0709-42e5-b3e4-1e227ebc43d1 tempest-VolumesAssistedSnapshotsTest-1323043947 tempest-VolumesAssistedSnapshotsTest-1323043947-project-member] Lock "61fdfa06-cb40-44a3-8abc-428b26bd40f5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.885s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1735.048051] env[63024]: DEBUG nova.network.neutron [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Successfully updated port: 5dca7a03-21d2-431e-95cc-a6baa1929b65 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1735.159498] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Acquiring lock "02db92ec-3377-406b-a95c-0022579fa75b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.159809] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Lock "02db92ec-3377-406b-a95c-0022579fa75b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1735.215553] env[63024]: DEBUG oslo_vmware.api [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950856, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.226296] env[63024]: DEBUG oslo_concurrency.lockutils [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1735.415241] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52be5718-5d9e-ddb2-6a1a-2da8256df641, 'name': SearchDatastore_Task, 'duration_secs': 0.014502} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.415514] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1735.415775] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158/c1fd4146-6dd3-49e9-a744-466e6168e158.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1735.416059] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0c629776-7a0b-471f-a59c-1fb61789f2ed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.425960] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1735.425960] env[63024]: value = "task-1950859" [ 1735.425960] env[63024]: _type = "Task" [ 1735.425960] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.436165] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950859, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.487987] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85ce027e-89d8-410e-b6bd-510c403efe5d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.496696] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf218e1-b5e1-4e39-8c1c-704f88f0808f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.533529] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eefc147-a689-43db-8f0a-92b8878adcc5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.539344] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950858, 'name': CreateVM_Task, 'duration_secs': 0.368062} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.540135] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1735.540674] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.540834] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.541454] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1735.541839] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81c35bab-0cad-444e-a80a-45364bc4d3e0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.547801] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003b9204-cb14-4816-9a5b-e50e2d4f431f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.554234] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "refresh_cache-82b7019c-5049-4b8b-abb4-46f326ce3d5b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1735.554234] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "refresh_cache-82b7019c-5049-4b8b-abb4-46f326ce3d5b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1735.554317] env[63024]: DEBUG nova.network.neutron [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1735.555455] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1735.555455] env[63024]: value = 
"session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529b3170-34b6-5387-fe73-cb2b0e451e0f" [ 1735.555455] env[63024]: _type = "Task" [ 1735.555455] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1735.567673] env[63024]: DEBUG nova.compute.provider_tree [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1735.575568] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529b3170-34b6-5387-fe73-cb2b0e451e0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1735.662118] env[63024]: DEBUG nova.compute.manager [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1735.711678] env[63024]: DEBUG oslo_vmware.api [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950856, 'name': PowerOnVM_Task, 'duration_secs': 0.594338} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1735.711930] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1735.712142] env[63024]: DEBUG nova.compute.manager [None req-a4bf2dfa-2894-48ee-befe-9e9a3665b5dd tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1735.712888] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65cd2ff2-146e-408a-bbf7-f1077c229a9a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1735.817178] env[63024]: DEBUG nova.network.neutron [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Successfully updated port: e04e8bec-aaf6-4150-8aa3-16baf5d05b05 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1735.935961] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950859, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.077904] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529b3170-34b6-5387-fe73-cb2b0e451e0f, 'name': SearchDatastore_Task, 'duration_secs': 0.025972} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.078719] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.079114] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1736.079802] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.079802] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.081072] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1736.081072] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a365700-8562-43f9-b171-727e3c4f4231 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.094857] env[63024]: DEBUG nova.network.neutron [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1736.098775] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1736.098983] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1736.100493] env[63024]: ERROR nova.scheduler.client.report [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [req-14842947-3593-48d7-b7cb-2e0b7d44635a] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-14842947-3593-48d7-b7cb-2e0b7d44635a"}]} [ 1736.100839] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c73c9354-f6d2-4203-837e-1bc9685845bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.111901] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1736.111901] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c1a673-94b1-a592-f48a-369bc7a15a29" [ 1736.111901] env[63024]: _type = "Task" [ 1736.111901] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.124068] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c1a673-94b1-a592-f48a-369bc7a15a29, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.125610] env[63024]: DEBUG nova.scheduler.client.report [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1736.144375] env[63024]: DEBUG nova.scheduler.client.report [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1736.145070] env[63024]: DEBUG nova.compute.provider_tree [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1736.162676] env[63024]: DEBUG nova.scheduler.client.report [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1736.182912] env[63024]: DEBUG nova.scheduler.client.report [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1736.191088] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.256830] env[63024]: DEBUG nova.compute.manager [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 
37792b57-3347-4134-a060-53359afa3298] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1736.258420] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cacfafcb-0d46-4607-a42d-7a44d876299e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.320462] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.321169] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.321169] env[63024]: DEBUG nova.network.neutron [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1736.330757] env[63024]: DEBUG nova.network.neutron [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Updating instance_info_cache with network_info: [{"id": "5dca7a03-21d2-431e-95cc-a6baa1929b65", "address": "fa:16:3e:fb:f6:ec", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dca7a03-21", "ovs_interfaceid": "5dca7a03-21d2-431e-95cc-a6baa1929b65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1736.439297] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950859, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.953271} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.439615] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158/c1fd4146-6dd3-49e9-a744-466e6168e158.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1736.440308] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1736.440550] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-807aabe8-38b4-4060-b4d2-4f06795503da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.448484] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1736.448484] env[63024]: value = "task-1950860" [ 1736.448484] env[63024]: _type = "Task" [ 1736.448484] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.456887] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950860, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.613918] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b4aaf6-ffaa-4581-8b65-f16445636d85 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.629079] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cf07d05-c42c-4131-8707-078af0e51215 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.632418] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c1a673-94b1-a592-f48a-369bc7a15a29, 'name': SearchDatastore_Task, 'duration_secs': 0.057859} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1736.633602] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3b11908-9b5e-47cf-b5af-823211e04afb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.670018] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01be29e-19d9-48cf-aedb-2a0d48224c29 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.678246] env[63024]: DEBUG nova.compute.manager [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Received event network-changed-c6c42da9-f98c-4f7e-94e7-39d45bc8f882 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1736.678457] env[63024]: DEBUG nova.compute.manager [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Refreshing instance network info cache due to event network-changed-c6c42da9-f98c-4f7e-94e7-39d45bc8f882. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1736.678666] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] Acquiring lock "refresh_cache-c1fd4146-6dd3-49e9-a744-466e6168e158" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1736.678808] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] Acquired lock "refresh_cache-c1fd4146-6dd3-49e9-a744-466e6168e158" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1736.678962] env[63024]: DEBUG nova.network.neutron [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Refreshing network info cache for port c6c42da9-f98c-4f7e-94e7-39d45bc8f882 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1736.681587] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "6156ce17-3f29-487a-afc5-2fa0fb7f114c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1736.681792] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "6156ce17-3f29-487a-afc5-2fa0fb7f114c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1736.682054] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: 
(returnval){ [ 1736.682054] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5288e047-39d4-4a3c-702d-e44e86a3e921" [ 1736.682054] env[63024]: _type = "Task" [ 1736.682054] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.690345] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acb5c73e-8d3a-4e6d-888b-68b1c65cad7f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.697828] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5288e047-39d4-4a3c-702d-e44e86a3e921, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.708646] env[63024]: DEBUG nova.compute.provider_tree [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1736.775065] env[63024]: INFO nova.compute.manager [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] instance snapshotting [ 1736.777840] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06d876a-dc78-4d68-aee2-8048439df88a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.798652] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81afd0d2-e0bb-4bf5-ad40-7eb3a2cc4711 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.839378] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "refresh_cache-82b7019c-5049-4b8b-abb4-46f326ce3d5b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1736.839378] env[63024]: DEBUG nova.compute.manager [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Instance network_info: |[{"id": "5dca7a03-21d2-431e-95cc-a6baa1929b65", "address": "fa:16:3e:fb:f6:ec", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 
4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dca7a03-21", "ovs_interfaceid": "5dca7a03-21d2-431e-95cc-a6baa1929b65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1736.841038] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:f6:ec', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5dca7a03-21d2-431e-95cc-a6baa1929b65', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1736.850302] env[63024]: DEBUG oslo.service.loopingcall [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1736.850801] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1736.851341] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cfa2dffb-5cc5-4697-b104-b9cb0963b4d2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1736.875181] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1736.875181] env[63024]: value = "task-1950861" [ 1736.875181] env[63024]: _type = "Task" [ 1736.875181] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1736.879706] env[63024]: WARNING nova.network.neutron [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] ffb24eaf-c6b6-414f-a69a-0c8806712ddd already exists in list: networks containing: ['ffb24eaf-c6b6-414f-a69a-0c8806712ddd']. ignoring it [ 1736.887744] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950861, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1736.961379] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950860, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.070918] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Volume attach. Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1737.071198] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402099', 'volume_id': 'eb2fb348-971e-4d2b-9510-bd374d5998e0', 'name': 'volume-eb2fb348-971e-4d2b-9510-bd374d5998e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b7f26f0e-d5a9-42a6-8af2-065659f89cf5', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb2fb348-971e-4d2b-9510-bd374d5998e0', 'serial': 'eb2fb348-971e-4d2b-9510-bd374d5998e0'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1737.072126] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921f2325-bd8f-45f0-98ec-ae7d7ff8c114 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.092431] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f850de9-f2b0-44ff-bbb7-d3a1cff1f87d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.118057] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Reconfiguring VM instance instance-00000019 to attach disk [datastore1] volume-eb2fb348-971e-4d2b-9510-bd374d5998e0/volume-eb2fb348-971e-4d2b-9510-bd374d5998e0.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1737.118412] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58df38d3-b0e4-495a-82a7-dd3b9abd76d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.141548] env[63024]: DEBUG nova.compute.manager [req-75f91d10-c84f-431e-898a-8baaa322029b req-46c1f1a4-3cd0-4327-9b0f-8d17e0730502 service nova] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Received event network-vif-deleted-a3574d89-a818-4dbd-bf07-78ac14b00783 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1737.143447] env[63024]: DEBUG oslo_vmware.api [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 
tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1737.143447] env[63024]: value = "task-1950862" [ 1737.143447] env[63024]: _type = "Task" [ 1737.143447] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.153173] env[63024]: DEBUG oslo_vmware.api [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950862, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.195983] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5288e047-39d4-4a3c-702d-e44e86a3e921, 'name': SearchDatastore_Task, 'duration_secs': 0.02758} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.196303] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1737.196567] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 8edc24d6-9073-4836-b14b-422df3ac1b88/8edc24d6-9073-4836-b14b-422df3ac1b88.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1737.196828] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5b8fcf7d-5f2c-43ee-8afa-454e13a1da64 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.206364] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1737.206364] env[63024]: value = "task-1950863" [ 1737.206364] env[63024]: _type = "Task" [ 1737.206364] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.219386] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950863, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.230214] env[63024]: ERROR nova.scheduler.client.report [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] [req-f0ed6e1a-db10-4eed-be1a-862f8e383f9f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f0ed6e1a-db10-4eed-be1a-862f8e383f9f"}]} [ 1737.247278] env[63024]: DEBUG nova.scheduler.client.report [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1737.262167] env[63024]: DEBUG nova.scheduler.client.report [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1737.262444] env[63024]: DEBUG nova.compute.provider_tree [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1737.277949] env[63024]: DEBUG nova.scheduler.client.report [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1737.302731] env[63024]: DEBUG nova.scheduler.client.report [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 
tempest-ServerRescueTestJSON-1014033088-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1737.313782] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "18444b47-476a-4ca3-9a4f-0dc58e652143" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.314139] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "18444b47-476a-4ca3-9a4f-0dc58e652143" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.314404] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "18444b47-476a-4ca3-9a4f-0dc58e652143-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1737.314645] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "18444b47-476a-4ca3-9a4f-0dc58e652143-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1737.314846] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "18444b47-476a-4ca3-9a4f-0dc58e652143-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1737.319135] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1737.320259] env[63024]: INFO nova.compute.manager [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Terminating instance [ 1737.322890] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with 
opID=oslo.vmware-e816d2aa-186d-45fe-94a2-5f3546819083 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.340644] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1737.340644] env[63024]: value = "task-1950864" [ 1737.340644] env[63024]: _type = "Task" [ 1737.340644] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.351151] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950864, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.386495] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950861, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.467253] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950860, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.010064} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.471983] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1737.477584] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e1edff-6792-4e92-b244-3fbf42858a25 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.509699] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158/c1fd4146-6dd3-49e9-a744-466e6168e158.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1737.515258] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16bf6ad2-deae-4090-9d3c-760a255bdae9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.546954] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1737.546954] env[63024]: value = "task-1950865" [ 1737.546954] env[63024]: _type = "Task" [ 1737.546954] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.556451] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950865, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.656717] env[63024]: DEBUG oslo_vmware.api [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950862, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.718443] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950863, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.739537] env[63024]: DEBUG nova.network.neutron [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Updating instance_info_cache with network_info: [{"id": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "address": "fa:16:3e:2f:45:02", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap182496b0-1e", "ovs_interfaceid": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e04e8bec-aaf6-4150-8aa3-16baf5d05b05", "address": "fa:16:3e:b9:3b:92", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape04e8bec-aa", "ovs_interfaceid": "e04e8bec-aaf6-4150-8aa3-16baf5d05b05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.810622] env[63024]: DEBUG nova.network.neutron [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Updated VIF entry in instance network info cache for port c6c42da9-f98c-4f7e-94e7-39d45bc8f882. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1737.811199] env[63024]: DEBUG nova.network.neutron [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Updating instance_info_cache with network_info: [{"id": "c6c42da9-f98c-4f7e-94e7-39d45bc8f882", "address": "fa:16:3e:3f:14:c4", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc6c42da9-f9", "ovs_interfaceid": "c6c42da9-f98c-4f7e-94e7-39d45bc8f882", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1737.830773] env[63024]: DEBUG nova.compute.manager [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1737.831474] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1737.832471] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b6b9d2-ea6b-4b4a-8035-9caaedc00a6d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.844028] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1737.846248] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f6318e7d-010a-4a17-ac13-5cf897806ea6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.859410] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950864, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.861281] env[63024]: DEBUG oslo_vmware.api [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1737.861281] env[63024]: value = "task-1950866" [ 1737.861281] env[63024]: _type = "Task" [ 1737.861281] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.870991] env[63024]: DEBUG oslo_vmware.api [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950866, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.886171] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950861, 'name': CreateVM_Task, 'duration_secs': 0.607721} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1737.889079] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1737.890182] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1737.890267] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1737.890614] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1737.890934] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4caf49a-48eb-419f-8d56-f903a61d4213 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.898535] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1737.898535] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fa5075-1075-e239-7561-43dda9bc7080" [ 1737.898535] env[63024]: _type = "Task" [ 1737.898535] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1737.917686] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fa5075-1075-e239-7561-43dda9bc7080, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1737.923589] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12d3ba3f-8517-4227-b143-c1c1b8fed04b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.930046] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10c392c7-f28f-47b1-b290-08981810a41a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.966533] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3920cf5-9fa3-4e4e-8c8c-0be699f81fd0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.975594] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44977845-3268-43cc-9445-d72d9808b62e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1737.992018] env[63024]: DEBUG nova.compute.provider_tree [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1738.057628] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950865, 'name': ReconfigVM_Task, 'duration_secs': 0.493576} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.058125] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Reconfigured VM instance instance-0000002d to attach disk [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158/c1fd4146-6dd3-49e9-a744-466e6168e158.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1738.059862] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f26f2319-89b2-4549-bcb6-3976ab363db8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.067646] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1738.067646] env[63024]: value = "task-1950867" [ 1738.067646] env[63024]: _type = "Task" [ 1738.067646] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.078969] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950867, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.156037] env[63024]: DEBUG oslo_vmware.api [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950862, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.220460] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950863, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766878} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.220460] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 8edc24d6-9073-4836-b14b-422df3ac1b88/8edc24d6-9073-4836-b14b-422df3ac1b88.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1738.220460] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1738.220853] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7fac330e-92af-42f1-85f7-55e058dfd00d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.230224] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1738.230224] env[63024]: value = "task-1950868" [ 1738.230224] env[63024]: _type = "Task" [ 1738.230224] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.242057] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950868, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.242725] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.243565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.243815] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.244974] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58fcec42-9659-44c9-9350-070605aed09f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.265714] env[63024]: DEBUG nova.virt.hardware [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1738.265981] env[63024]: DEBUG nova.virt.hardware [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1738.266114] env[63024]: DEBUG nova.virt.hardware [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1738.266296] env[63024]: DEBUG nova.virt.hardware [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1738.266460] env[63024]: DEBUG nova.virt.hardware [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image pref 0:0:0 {{(pid=63024) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1738.266611] env[63024]: DEBUG nova.virt.hardware [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1738.266820] env[63024]: DEBUG nova.virt.hardware [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1738.266993] env[63024]: DEBUG nova.virt.hardware [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1738.267636] env[63024]: DEBUG nova.virt.hardware [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1738.267636] env[63024]: DEBUG nova.virt.hardware [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1738.267636] env[63024]: DEBUG nova.virt.hardware [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1738.274542] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Reconfiguring VM to attach interface {{(pid=63024) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1738.275431] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1238873d-430b-4427-86ce-1dd277e84d6f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.297541] env[63024]: DEBUG oslo_vmware.api [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1738.297541] env[63024]: value = "task-1950869" [ 1738.297541] env[63024]: _type = "Task" [ 1738.297541] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.310422] env[63024]: DEBUG oslo_vmware.api [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950869, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.316198] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] Releasing lock "refresh_cache-c1fd4146-6dd3-49e9-a744-466e6168e158" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.317342] env[63024]: DEBUG nova.compute.manager [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Received event network-vif-plugged-5dca7a03-21d2-431e-95cc-a6baa1929b65 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1738.317342] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] Acquiring lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1738.317342] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] Lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1738.317342] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] Lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1738.317342] env[63024]: DEBUG nova.compute.manager [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] No waiting events found dispatching network-vif-plugged-5dca7a03-21d2-431e-95cc-a6baa1929b65 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1738.317786] env[63024]: WARNING nova.compute.manager [req-2f7307c1-f64c-4872-a018-882996efeeb7 req-621685ef-a746-45c1-83af-6f844d8e106a service nova] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Received unexpected event network-vif-plugged-5dca7a03-21d2-431e-95cc-a6baa1929b65 for instance with vm_state building and task_state spawning. [ 1738.351835] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950864, 'name': CreateSnapshot_Task, 'duration_secs': 0.855828} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.352135] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1738.352980] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81a2716e-a9f4-42e4-b011-e4445f4a128f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.376094] env[63024]: DEBUG oslo_vmware.api [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950866, 'name': PowerOffVM_Task, 'duration_secs': 0.336314} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.376094] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1738.376094] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1738.376094] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a38932d8-67b2-4f3f-9141-70511f89135a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.411355] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fa5075-1075-e239-7561-43dda9bc7080, 'name': SearchDatastore_Task, 'duration_secs': 0.043163} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.411930] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1738.412050] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1738.412206] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1738.412350] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1738.412528] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1738.412792] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4b5781a5-03d1-4708-9bc6-796812bcd3f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.423423] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1738.423630] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1738.424433] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-943cd4d6-fa12-4acc-aeb7-d7b9d2dce10a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.430820] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1738.430820] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5205f7ca-5ac1-7b09-305a-32cb4be7f240" [ 1738.430820] env[63024]: _type = "Task" [ 1738.430820] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.439492] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5205f7ca-5ac1-7b09-305a-32cb4be7f240, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.454961] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1738.455235] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1738.455623] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Deleting the datastore file [datastore1] 18444b47-476a-4ca3-9a4f-0dc58e652143 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1738.456070] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31794a55-f0e8-4b07-847d-cf3149336993 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.463568] env[63024]: DEBUG oslo_vmware.api [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1738.463568] env[63024]: value = "task-1950871" [ 1738.463568] env[63024]: _type = "Task" [ 1738.463568] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.472262] env[63024]: DEBUG oslo_vmware.api [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950871, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.535439] env[63024]: DEBUG nova.scheduler.client.report [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 75 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1738.535707] env[63024]: DEBUG nova.compute.provider_tree [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 75 to 76 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1738.535906] env[63024]: DEBUG nova.compute.provider_tree [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1738.577365] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950867, 'name': Rename_Task, 'duration_secs': 0.273593} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.577579] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1738.577832] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c9da00e-8bd2-4253-829f-9e8e8a027519 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.585527] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1738.585527] env[63024]: value = "task-1950872" [ 1738.585527] env[63024]: _type = "Task" [ 1738.585527] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.593932] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950872, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.658324] env[63024]: DEBUG oslo_vmware.api [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950862, 'name': ReconfigVM_Task, 'duration_secs': 1.169636} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.658951] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Reconfigured VM instance instance-00000019 to attach disk [datastore1] volume-eb2fb348-971e-4d2b-9510-bd374d5998e0/volume-eb2fb348-971e-4d2b-9510-bd374d5998e0.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1738.666390] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b4bd2ab-eabe-4549-9632-cd6166704bf1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.691819] env[63024]: DEBUG oslo_vmware.api [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1738.691819] env[63024]: value = "task-1950873" [ 1738.691819] env[63024]: _type = "Task" [ 1738.691819] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.704964] env[63024]: DEBUG oslo_vmware.api [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950873, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.742438] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950868, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077822} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.742789] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1738.743823] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9bc0ae2-cbc5-4465-beb9-17ccff1131b3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.771313] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 8edc24d6-9073-4836-b14b-422df3ac1b88/8edc24d6-9073-4836-b14b-422df3ac1b88.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1738.771700] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-892e3e9e-0dc8-4de6-8f60-cb9f6777438b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.794414] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1738.794414] env[63024]: value = "task-1950874" [ 1738.794414] env[63024]: _type = "Task" [ 1738.794414] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.810185] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950874, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.814782] env[63024]: DEBUG oslo_vmware.api [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950869, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.874952] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1738.875366] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-46c22ddb-c83e-41ab-ac26-148bfd714de7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.885808] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1738.885808] env[63024]: value = "task-1950875" [ 1738.885808] env[63024]: _type = "Task" [ 1738.885808] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.895174] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950875, 'name': CloneVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.942051] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5205f7ca-5ac1-7b09-305a-32cb4be7f240, 'name': SearchDatastore_Task, 'duration_secs': 0.014792} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.942999] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e88d931-d7b3-4e30-a166-3e96b6e39760 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1738.949153] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1738.949153] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52282a37-0ddf-f097-b271-ab43992d0c63" [ 1738.949153] env[63024]: _type = "Task" [ 1738.949153] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1738.960692] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52282a37-0ddf-f097-b271-ab43992d0c63, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1738.973392] env[63024]: DEBUG oslo_vmware.api [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1950871, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.33958} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1738.975041] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1738.975041] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1738.975041] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1738.975041] env[63024]: INFO nova.compute.manager [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1738.975041] env[63024]: DEBUG oslo.service.loopingcall [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1738.975041] env[63024]: DEBUG nova.compute.manager [-] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1738.975041] env[63024]: DEBUG nova.network.neutron [-] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1739.041082] env[63024]: DEBUG oslo_concurrency.lockutils [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.551s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.043997] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.761s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.043997] env[63024]: DEBUG nova.objects.instance [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Lazy-loading 'resources' on Instance uuid cc5cfa6d-d3db-4997-8413-2460e1124f02 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1739.064458] env[63024]: INFO nova.scheduler.client.report [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Deleted allocations for instance ccd80e20-9fc2-415a-a428-fcf85994c7f8 [ 1739.098190] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950872, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.205253] env[63024]: DEBUG oslo_vmware.api [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950873, 'name': ReconfigVM_Task, 'duration_secs': 0.182597} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.205253] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402099', 'volume_id': 'eb2fb348-971e-4d2b-9510-bd374d5998e0', 'name': 'volume-eb2fb348-971e-4d2b-9510-bd374d5998e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b7f26f0e-d5a9-42a6-8af2-065659f89cf5', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb2fb348-971e-4d2b-9510-bd374d5998e0', 'serial': 'eb2fb348-971e-4d2b-9510-bd374d5998e0'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1739.316257] env[63024]: DEBUG oslo_vmware.api [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950869, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.323560] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950874, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.387980] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "9267e5e4-732d-47f1-8a30-d926a1269fb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.388998] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "9267e5e4-732d-47f1-8a30-d926a1269fb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.401457] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950875, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.459917] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52282a37-0ddf-f097-b271-ab43992d0c63, 'name': SearchDatastore_Task, 'duration_secs': 0.028528} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1739.460121] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1739.460369] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 82b7019c-5049-4b8b-abb4-46f326ce3d5b/82b7019c-5049-4b8b-abb4-46f326ce3d5b.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1739.460692] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-804cf0dc-25ca-4528-94bc-cd7ecb14b964 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1739.469641] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1739.469641] env[63024]: value = "task-1950876" [ 1739.469641] env[63024]: _type = "Task" [ 1739.469641] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1739.479225] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950876, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.575738] env[63024]: DEBUG oslo_concurrency.lockutils [None req-193eeebf-e8f1-4a6e-a750-162df7b64339 tempest-ServerRescueTestJSON-1014033088 tempest-ServerRescueTestJSON-1014033088-project-member] Lock "ccd80e20-9fc2-415a-a428-fcf85994c7f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.493s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.597710] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950872, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.810908] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950874, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.816585] env[63024]: DEBUG oslo_vmware.api [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950869, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.904410] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950875, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1739.929701] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Acquiring lock "3815d381-760d-40fc-98cf-8e6af287007f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.929971] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Lock "3815d381-760d-40fc-98cf-8e6af287007f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.930240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Acquiring lock "3815d381-760d-40fc-98cf-8e6af287007f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1739.930474] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Lock "3815d381-760d-40fc-98cf-8e6af287007f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1739.930819] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Lock "3815d381-760d-40fc-98cf-8e6af287007f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1739.933455] env[63024]: INFO nova.compute.manager [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Terminating instance [ 1739.980934] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 
tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950876, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.100056] env[63024]: DEBUG oslo_vmware.api [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950872, 'name': PowerOnVM_Task, 'duration_secs': 1.021431} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.104245] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1740.104245] env[63024]: INFO nova.compute.manager [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Took 10.68 seconds to spawn the instance on the hypervisor. [ 1740.104245] env[63024]: DEBUG nova.compute.manager [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1740.104945] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e873f55-c389-44d6-b1d2-6edf7ba29111 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.136052] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fc9945e-b54c-4b82-bb62-530266c0b894 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.143370] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a0c5a6-a111-4ac1-a9a8-4441ed986880 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.181745] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba623d7-3d45-4f3e-ab4e-4e04819e7312 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.193425] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5b0ead-b49b-4131-8b6c-96d1e640061f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.208786] env[63024]: DEBUG nova.compute.provider_tree [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1740.256726] env[63024]: DEBUG nova.objects.instance [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lazy-loading 'flavor' on Instance uuid b7f26f0e-d5a9-42a6-8af2-065659f89cf5 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1740.317605] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950874, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.324523] env[63024]: DEBUG oslo_vmware.api [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950869, 'name': ReconfigVM_Task, 'duration_secs': 1.608406} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.325116] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1740.325366] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Reconfigured VM to attach interface {{(pid=63024) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1740.404034] env[63024]: DEBUG nova.compute.manager [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Received event network-changed-5dca7a03-21d2-431e-95cc-a6baa1929b65 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1740.404034] env[63024]: DEBUG nova.compute.manager [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Refreshing instance network info cache due to event network-changed-5dca7a03-21d2-431e-95cc-a6baa1929b65. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1740.404034] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] Acquiring lock "refresh_cache-82b7019c-5049-4b8b-abb4-46f326ce3d5b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1740.404034] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] Acquired lock "refresh_cache-82b7019c-5049-4b8b-abb4-46f326ce3d5b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1740.404364] env[63024]: DEBUG nova.network.neutron [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Refreshing network info cache for port 5dca7a03-21d2-431e-95cc-a6baa1929b65 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1740.409469] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950875, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.423134] env[63024]: DEBUG nova.network.neutron [-] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1740.437040] env[63024]: DEBUG nova.compute.manager [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1740.437276] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1740.439078] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba378df-ae54-4376-9c21-5b87eaabfc51 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.447396] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1740.447644] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ea86f1cb-6ab7-4849-9a01-54869d3665c8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.454282] env[63024]: DEBUG oslo_vmware.api [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Waiting for the task: (returnval){ [ 1740.454282] env[63024]: value = "task-1950877" [ 1740.454282] env[63024]: _type = "Task" [ 1740.454282] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.463007] env[63024]: DEBUG oslo_vmware.api [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950877, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.481889] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950876, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.847961} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.481889] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 82b7019c-5049-4b8b-abb4-46f326ce3d5b/82b7019c-5049-4b8b-abb4-46f326ce3d5b.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1740.482072] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1740.482425] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ab51d2d-000e-42bc-8fc3-9802a754392c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.489127] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1740.489127] env[63024]: value = "task-1950878" [ 1740.489127] env[63024]: _type = "Task" [ 1740.489127] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.498554] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950878, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.624424] env[63024]: INFO nova.compute.manager [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Took 41.39 seconds to build instance. [ 1740.740465] env[63024]: ERROR nova.scheduler.client.report [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] [req-850e8e8e-c13d-48d8-b9bf-eaf7dbc85305] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-850e8e8e-c13d-48d8-b9bf-eaf7dbc85305"}]} [ 1740.761990] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb55bb59-ab72-4dee-8b5d-9286fef32c95 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.335s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.772981] env[63024]: DEBUG nova.scheduler.client.report [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1740.789373] env[63024]: DEBUG nova.scheduler.client.report [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1740.789626] env[63024]: DEBUG nova.compute.provider_tree [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1740.804046] env[63024]: DEBUG nova.scheduler.client.report [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1740.811538] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950874, 'name': ReconfigVM_Task, 'duration_secs': 1.576805} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.811806] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 8edc24d6-9073-4836-b14b-422df3ac1b88/8edc24d6-9073-4836-b14b-422df3ac1b88.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1740.812487] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-60354d19-80e5-4afb-aa80-0010c52d5693 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1740.818345] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1740.818345] env[63024]: value = "task-1950879" [ 1740.818345] env[63024]: _type = "Task" [ 1740.818345] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1740.826374] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950879, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.827321] env[63024]: DEBUG nova.scheduler.client.report [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1740.833796] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5892a03-6724-4137-9976-8580386b01d7 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-9cf45c3a-2a74-4f8e-8817-47bbd748a44b-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.374s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1740.911957] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950875, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1740.925398] env[63024]: INFO nova.compute.manager [-] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Took 1.95 seconds to deallocate network for instance. [ 1740.969093] env[63024]: DEBUG oslo_vmware.api [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950877, 'name': PowerOffVM_Task, 'duration_secs': 0.358269} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1740.969393] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1740.969565] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1740.969820] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aafcd166-c0bd-4bd0-a382-615261efdaad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.001489] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950878, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069575} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.004019] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1741.004019] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca2f4c71-3788-4459-9cb0-5a1a07384ab1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.030797] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 82b7019c-5049-4b8b-abb4-46f326ce3d5b/82b7019c-5049-4b8b-abb4-46f326ce3d5b.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1741.034205] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c90fbba-f9d0-443f-a093-898724d12986 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.055350] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1741.055350] env[63024]: value = "task-1950881" [ 1741.055350] env[63024]: _type = "Task" [ 1741.055350] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.070319] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950881, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.074912] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1741.075161] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1741.075418] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Deleting the datastore file [datastore1] 3815d381-760d-40fc-98cf-8e6af287007f {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1741.077419] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3464b24c-25c7-441b-8bd1-433f08db4128 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.084992] env[63024]: DEBUG oslo_vmware.api [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Waiting for the task: (returnval){ [ 1741.084992] env[63024]: value = "task-1950882" [ 1741.084992] env[63024]: _type = "Task" [ 1741.084992] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.093300] env[63024]: DEBUG oslo_vmware.api [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950882, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.127416] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e19c6eb-9123-44de-8074-79d54e06fe4a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "c1fd4146-6dd3-49e9-a744-466e6168e158" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.378s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.335287] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950879, 'name': Rename_Task, 'duration_secs': 0.146928} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.337187] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1741.341788] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a0e0c8d-f66e-4510-8b16-995b263f0ab1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.350156] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1741.350156] env[63024]: value = "task-1950883" [ 1741.350156] env[63024]: _type = "Task" [ 1741.350156] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1741.366785] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950883, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.406377] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-336387a7-91a0-4047-8dce-60c307977e72 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.414110] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950875, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.418963] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b587ea-2d9c-44db-9294-9664016391dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.451729] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.453302] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef1a1638-09f5-4dbc-8915-40195b1e5333 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.460969] env[63024]: DEBUG nova.network.neutron [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Updated VIF entry in instance network info cache for port 5dca7a03-21d2-431e-95cc-a6baa1929b65. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1741.461380] env[63024]: DEBUG nova.network.neutron [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Updating instance_info_cache with network_info: [{"id": "5dca7a03-21d2-431e-95cc-a6baa1929b65", "address": "fa:16:3e:fb:f6:ec", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5dca7a03-21", "ovs_interfaceid": "5dca7a03-21d2-431e-95cc-a6baa1929b65", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1741.463551] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5088342-de8d-4276-ab39-ba5d6d8a1dd3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.478136] env[63024]: DEBUG nova.compute.provider_tree [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1741.569677] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950881, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.597559] env[63024]: DEBUG oslo_vmware.api [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Task: {'id': task-1950882, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.281644} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.597948] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1741.598270] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1741.598578] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1741.598871] env[63024]: INFO nova.compute.manager [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1741.599190] env[63024]: DEBUG oslo.service.loopingcall [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1741.599394] env[63024]: DEBUG nova.compute.manager [-] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1741.599492] env[63024]: DEBUG nova.network.neutron [-] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1741.630062] env[63024]: DEBUG nova.compute.manager [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1741.860942] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950883, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1741.909502] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950875, 'name': CloneVM_Task, 'duration_secs': 2.968658} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1741.909745] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Created linked-clone VM from snapshot [ 1741.910443] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7be75f2-1103-4e80-8e35-96112c8e24b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.920747] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Uploading image ee5d780e-10a0-4109-93a4-c4258b879f3d {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1741.949506] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1741.949506] env[63024]: value = "vm-402106" [ 1741.949506] env[63024]: _type = "VirtualMachine" [ 1741.949506] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1741.949801] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-016d2c47-5546-4bbf-9481-a6491ddd6ca4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1741.958537] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lease: (returnval){ [ 1741.958537] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b04f88-189c-8e99-5772-392360c224cf" [ 1741.958537] env[63024]: _type = "HttpNfcLease" [ 1741.958537] env[63024]: } obtained for exporting VM: (result){ [ 1741.958537] env[63024]: value = "vm-402106" [ 1741.958537] env[63024]: _type = "VirtualMachine" [ 1741.958537] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1741.958806] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the lease: (returnval){ [ 1741.958806] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b04f88-189c-8e99-5772-392360c224cf" [ 1741.958806] env[63024]: _type = "HttpNfcLease" [ 1741.958806] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1741.966701] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1741.966701] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b04f88-189c-8e99-5772-392360c224cf" [ 1741.966701] env[63024]: _type = "HttpNfcLease" [ 1741.966701] env[63024]: } is initializing. 
{{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1741.968843] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] Releasing lock "refresh_cache-82b7019c-5049-4b8b-abb4-46f326ce3d5b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1741.969108] env[63024]: DEBUG nova.compute.manager [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Received event network-vif-plugged-e04e8bec-aaf6-4150-8aa3-16baf5d05b05 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1741.969332] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] Acquiring lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1741.969634] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] Lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1741.969878] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] Lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1741.970174] env[63024]: DEBUG nova.compute.manager [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] No waiting events found dispatching network-vif-plugged-e04e8bec-aaf6-4150-8aa3-16baf5d05b05 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1741.970388] env[63024]: WARNING nova.compute.manager [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Received unexpected event network-vif-plugged-e04e8bec-aaf6-4150-8aa3-16baf5d05b05 for instance with vm_state active and task_state None. [ 1741.970666] env[63024]: DEBUG nova.compute.manager [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Received event network-changed-e04e8bec-aaf6-4150-8aa3-16baf5d05b05 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1741.970899] env[63024]: DEBUG nova.compute.manager [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Refreshing instance network info cache due to event network-changed-e04e8bec-aaf6-4150-8aa3-16baf5d05b05. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1741.971607] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] Acquiring lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1741.971607] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] Acquired lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1741.971607] env[63024]: DEBUG nova.network.neutron [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Refreshing network info cache for port e04e8bec-aaf6-4150-8aa3-16baf5d05b05 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1742.021950] env[63024]: DEBUG nova.scheduler.client.report [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 77 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1742.022703] env[63024]: DEBUG nova.compute.provider_tree [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 77 to 78 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1742.022937] env[63024]: DEBUG nova.compute.provider_tree [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1742.074277] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950881, 'name': ReconfigVM_Task, 'duration_secs': 0.515498} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.074716] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 82b7019c-5049-4b8b-abb4-46f326ce3d5b/82b7019c-5049-4b8b-abb4-46f326ce3d5b.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1742.075827] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9416ae5e-0b55-478e-b0a7-38c2c32953ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.087321] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1742.087321] env[63024]: value = "task-1950885" [ 1742.087321] env[63024]: _type = "Task" [ 1742.087321] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.096680] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950885, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.153098] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1742.363366] env[63024]: DEBUG oslo_vmware.api [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950883, 'name': PowerOnVM_Task, 'duration_secs': 0.665004} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.363668] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1742.363828] env[63024]: DEBUG nova.compute.manager [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1742.366522] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a88496-8343-444f-bf54-568adf763070 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.469872] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1742.469872] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b04f88-189c-8e99-5772-392360c224cf" [ 1742.469872] env[63024]: _type = "HttpNfcLease" [ 1742.469872] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1742.470431] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1742.470431] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b04f88-189c-8e99-5772-392360c224cf" [ 1742.470431] env[63024]: _type = "HttpNfcLease" [ 1742.470431] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1742.471589] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eea0927-ad97-472d-a9de-71ab5e89dfeb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.483701] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5256e-ec6a-adb3-0078-3c05210bcf34/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1742.483982] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5256e-ec6a-adb3-0078-3c05210bcf34/disk-0.vmdk for reading. 
{{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1742.550073] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.506s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1742.556356] env[63024]: DEBUG oslo_concurrency.lockutils [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.536s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1742.556719] env[63024]: DEBUG nova.objects.instance [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Lazy-loading 'resources' on Instance uuid 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1742.595383] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950885, 'name': Rename_Task, 'duration_secs': 0.181302} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1742.596398] env[63024]: INFO nova.scheduler.client.report [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Deleted allocations for instance cc5cfa6d-d3db-4997-8413-2460e1124f02 [ 1742.597470] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1742.603485] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd172c9b-567a-49c8-b223-791e859b2c53 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.610181] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-d53a4953-d289-479d-9e4d-3b57a4bee9b3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1742.614167] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1742.614167] env[63024]: value = "task-1950886" [ 1742.614167] env[63024]: _type = "Task" [ 1742.614167] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1742.615396] env[63024]: DEBUG nova.network.neutron [-] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1742.624911] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950886, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1742.810991] env[63024]: DEBUG nova.compute.manager [req-19a7c6b5-27e7-4e73-8b79-803b99f22dc2 req-ede46aee-2117-48e3-95d0-f50fb10ee15f service nova] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Received event network-vif-deleted-008138b2-5e34-470d-b8f1-93b1ca8df541 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1742.810991] env[63024]: DEBUG nova.compute.manager [req-19a7c6b5-27e7-4e73-8b79-803b99f22dc2 req-ede46aee-2117-48e3-95d0-f50fb10ee15f service nova] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Received event network-vif-deleted-bc1da74a-2cfe-4e8e-9682-78541f185723 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1742.871682] env[63024]: DEBUG nova.network.neutron [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Updated VIF entry in instance network info cache for port e04e8bec-aaf6-4150-8aa3-16baf5d05b05. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1742.872126] env[63024]: DEBUG nova.network.neutron [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Updating instance_info_cache with network_info: [{"id": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "address": "fa:16:3e:2f:45:02", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap182496b0-1e", "ovs_interfaceid": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "e04e8bec-aaf6-4150-8aa3-16baf5d05b05", "address": "fa:16:3e:b9:3b:92", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape04e8bec-aa", "ovs_interfaceid": "e04e8bec-aaf6-4150-8aa3-16baf5d05b05", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1742.886252] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.111741] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3b7b8a3c-c1a2-4fa3-8bad-43bc115fe399 tempest-ServerTagsTestJSON-2061380494 tempest-ServerTagsTestJSON-2061380494-project-member] Lock "cc5cfa6d-d3db-4997-8413-2460e1124f02" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.247s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1743.124453] env[63024]: INFO nova.compute.manager [-] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Took 1.52 seconds to deallocate network for instance. [ 1743.133901] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950886, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.280964] env[63024]: DEBUG oslo_concurrency.lockutils [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.280964] env[63024]: DEBUG oslo_concurrency.lockutils [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1743.375905] env[63024]: DEBUG oslo_concurrency.lockutils [req-5f576b20-6b85-4cad-a383-adaef41d90f6 req-545c8a6c-b86e-4e55-bdfc-b53900d1ef12 service nova] Releasing lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1743.637309] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950886, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1743.645456] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1743.663508] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a497a34-1608-4824-b7a8-22bdebdd3006 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.671566] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a106c0ed-16ec-448b-937b-623238a01596 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.707550] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-451b7c29-a527-46bd-8d72-8fc30b3a6beb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.715829] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bbbfc3f-d168-4f7a-8598-6b94238a5e0d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.733882] env[63024]: DEBUG nova.compute.provider_tree [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1743.782539] env[63024]: INFO nova.compute.manager [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Detaching volume eb2fb348-971e-4d2b-9510-bd374d5998e0 [ 1743.834194] env[63024]: INFO nova.virt.block_device [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Attempting to driver detach volume eb2fb348-971e-4d2b-9510-bd374d5998e0 from mountpoint /dev/sdb [ 1743.834438] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Volume detach. Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1743.834625] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402099', 'volume_id': 'eb2fb348-971e-4d2b-9510-bd374d5998e0', 'name': 'volume-eb2fb348-971e-4d2b-9510-bd374d5998e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b7f26f0e-d5a9-42a6-8af2-065659f89cf5', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb2fb348-971e-4d2b-9510-bd374d5998e0', 'serial': 'eb2fb348-971e-4d2b-9510-bd374d5998e0'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1743.836408] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1971c771-b704-4ba4-89c0-102be7020109 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.863846] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4635b569-4aa3-48ac-9efe-fe18e4e6e0db {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.875106] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe46ece4-53bb-4ac9-bf28-01c5865e5e6e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.909269] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae215f16-7ca4-46af-8a4b-485f05ff72a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.933833] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 
tempest-VolumesAdminNegativeTest-1443872821-project-member] The volume has not been displaced from its original location: [datastore1] volume-eb2fb348-971e-4d2b-9510-bd374d5998e0/volume-eb2fb348-971e-4d2b-9510-bd374d5998e0.vmdk. No consolidation needed. {{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1743.939832] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Reconfiguring VM instance instance-00000019 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1743.940344] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16684c97-00cf-4c21-bf6d-e5d8fc5836d5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.960678] env[63024]: DEBUG oslo_vmware.api [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1743.960678] env[63024]: value = "task-1950887" [ 1743.960678] env[63024]: _type = "Task" [ 1743.960678] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.971791] env[63024]: DEBUG oslo_vmware.api [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950887, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.091221] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "8edc24d6-9073-4836-b14b-422df3ac1b88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.091556] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "8edc24d6-9073-4836-b14b-422df3ac1b88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.091979] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "8edc24d6-9073-4836-b14b-422df3ac1b88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.092197] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "8edc24d6-9073-4836-b14b-422df3ac1b88-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.092395] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "8edc24d6-9073-4836-b14b-422df3ac1b88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.097435] env[63024]: INFO nova.compute.manager [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Terminating instance [ 1744.131309] env[63024]: DEBUG oslo_vmware.api [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1950886, 'name': PowerOnVM_Task, 'duration_secs': 1.362769} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.131309] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1744.131309] env[63024]: INFO nova.compute.manager [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Took 9.84 seconds to spawn the instance on the hypervisor. 
[ 1744.131721] env[63024]: DEBUG nova.compute.manager [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1744.135583] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25e3367-5224-4c81-9a3c-ee0ecfc6e469 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.301353] env[63024]: DEBUG nova.scheduler.client.report [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 78 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1744.301767] env[63024]: DEBUG nova.compute.provider_tree [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 78 to 79 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1744.302028] env[63024]: DEBUG nova.compute.provider_tree [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1744.307829] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "interface-9cf45c3a-2a74-4f8e-8817-47bbd748a44b-e04e8bec-aaf6-4150-8aa3-16baf5d05b05" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1744.308271] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-9cf45c3a-2a74-4f8e-8817-47bbd748a44b-e04e8bec-aaf6-4150-8aa3-16baf5d05b05" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.472575] env[63024]: DEBUG oslo_vmware.api [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950887, 'name': ReconfigVM_Task, 'duration_secs': 0.405102} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.473324] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Reconfigured VM instance instance-00000019 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1744.477728] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8317bf43-d80f-4ce3-bcd4-c6992d49ae2b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.494180] env[63024]: DEBUG oslo_vmware.api [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1744.494180] env[63024]: value = "task-1950888" [ 1744.494180] env[63024]: _type = "Task" [ 1744.494180] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.502441] env[63024]: DEBUG oslo_vmware.api [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950888, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.604071] env[63024]: DEBUG nova.compute.manager [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1744.604071] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1744.604071] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9016217-c2d6-4949-a63e-10f8abac3ce6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.613275] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1744.613569] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a02d3f8-9935-4305-8929-151e27f946f3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.621102] env[63024]: DEBUG oslo_vmware.api [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1744.621102] env[63024]: value = "task-1950889" [ 1744.621102] env[63024]: _type = "Task" [ 1744.621102] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.632104] env[63024]: DEBUG oslo_vmware.api [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950889, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.663111] env[63024]: INFO nova.compute.manager [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Took 44.89 seconds to build instance. 
[ 1744.811071] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1744.811315] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1744.812153] env[63024]: DEBUG oslo_concurrency.lockutils [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.256s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1744.821473] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af55cc3-0b3a-44c0-b18c-c0b12224eeff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.825699] env[63024]: DEBUG oslo_concurrency.lockutils [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.915s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1744.826225] env[63024]: DEBUG nova.objects.instance [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lazy-loading 'resources' on Instance uuid 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1744.849708] env[63024]: INFO nova.scheduler.client.report [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Deleted allocations for instance 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd [ 1744.853364] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7619d0e-e966-462c-ba00-c29473963556 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.890803] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Reconfiguring VM to detach interface {{(pid=63024) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1744.891166] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42f3c89b-b9d4-4507-af31-53c711ce6098 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.912051] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 
tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1744.912051] env[63024]: value = "task-1950890" [ 1744.912051] env[63024]: _type = "Task" [ 1744.912051] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.921926] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.003931] env[63024]: DEBUG oslo_vmware.api [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1950888, 'name': ReconfigVM_Task, 'duration_secs': 0.155352} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.004268] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402099', 'volume_id': 'eb2fb348-971e-4d2b-9510-bd374d5998e0', 'name': 'volume-eb2fb348-971e-4d2b-9510-bd374d5998e0', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'b7f26f0e-d5a9-42a6-8af2-065659f89cf5', 'attached_at': '', 'detached_at': '', 'volume_id': 'eb2fb348-971e-4d2b-9510-bd374d5998e0', 'serial': 'eb2fb348-971e-4d2b-9510-bd374d5998e0'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1745.133512] env[63024]: DEBUG oslo_vmware.api [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950889, 'name': PowerOffVM_Task, 'duration_secs': 0.230035} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.133646] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1745.133822] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1745.134089] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a330808c-9dde-4692-9ac8-f6c7fe1490b1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.165230] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f1fc88c8-9957-4d7c-a089-43c39aa1d7ce tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.797s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.210270] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1745.210451] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1745.210582] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleting the datastore file [datastore1] 8edc24d6-9073-4836-b14b-422df3ac1b88 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1745.210848] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6883e958-ccd7-426f-83db-f5d9248d99ea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.219992] env[63024]: DEBUG oslo_vmware.api [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1745.219992] env[63024]: value = "task-1950892" [ 1745.219992] env[63024]: _type = "Task" [ 1745.219992] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1745.228276] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "1709d916-d0c4-4706-b41b-8b0ed25f3331" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1745.229174] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1745.233559] env[63024]: DEBUG oslo_vmware.api [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950892, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.364305] env[63024]: DEBUG oslo_concurrency.lockutils [None req-26213794-cad2-4b34-96bf-cb02bddb6fc7 tempest-ServersTestFqdnHostnames-354726092 tempest-ServersTestFqdnHostnames-354726092-project-member] Lock "04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.370s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1745.426807] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.569198] env[63024]: DEBUG nova.objects.instance [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lazy-loading 'flavor' on Instance uuid b7f26f0e-d5a9-42a6-8af2-065659f89cf5 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1745.668361] env[63024]: DEBUG nova.compute.manager [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1745.736151] env[63024]: DEBUG oslo_vmware.api [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1950892, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172987} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1745.736272] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1745.738296] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1745.739367] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1745.739595] env[63024]: INFO nova.compute.manager [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1745.739872] env[63024]: DEBUG oslo.service.loopingcall [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1745.740434] env[63024]: DEBUG nova.compute.manager [-] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1745.740545] env[63024]: DEBUG nova.network.neutron [-] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1745.928074] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1745.931779] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9091ffa-d8ad-44b9-b09c-e323115ee7e1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.940039] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39134ee-6f43-424e-9d3f-128212475982 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.976791] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c9c3569-d90a-465c-8bad-8a57edc4afbc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.986173] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9695f79b-d90c-4066-b011-32fad69c3be1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.007088] env[63024]: DEBUG nova.compute.provider_tree [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1746.197574] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.262999] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Acquiring lock "9679a1a2-b003-4a60-a812-8b3a9b5f545f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.262999] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Lock "9679a1a2-b003-4a60-a812-8b3a9b5f545f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.262999] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Acquiring lock "9679a1a2-b003-4a60-a812-8b3a9b5f545f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1746.262999] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 
tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Lock "9679a1a2-b003-4a60-a812-8b3a9b5f545f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1746.262999] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Lock "9679a1a2-b003-4a60-a812-8b3a9b5f545f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.264538] env[63024]: INFO nova.compute.manager [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Terminating instance [ 1746.428395] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.513032] env[63024]: DEBUG nova.scheduler.client.report [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1746.580437] env[63024]: DEBUG oslo_concurrency.lockutils [None req-11cdc663-510d-412c-a477-a78dfbc63ef0 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.301s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1746.640226] env[63024]: DEBUG nova.compute.manager [req-8f0c5322-26b8-4f85-8187-a554660fd285 req-30c7e84a-cd5b-400b-ab45-29d8a43ad428 service nova] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Received event network-vif-deleted-cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1746.640492] env[63024]: INFO nova.compute.manager [req-8f0c5322-26b8-4f85-8187-a554660fd285 req-30c7e84a-cd5b-400b-ab45-29d8a43ad428 service nova] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Neutron deleted interface cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e; detaching it from the instance and deleting it from the info cache [ 1746.640693] env[63024]: DEBUG nova.network.neutron [req-8f0c5322-26b8-4f85-8187-a554660fd285 
req-30c7e84a-cd5b-400b-ab45-29d8a43ad428 service nova] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.774115] env[63024]: DEBUG nova.compute.manager [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1746.774115] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1746.774115] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a2d773-9df1-4e03-9f76-a18107ae9180 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.783803] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1746.786263] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9810042f-dc48-47fa-8d3b-e3d573035d27 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.796321] env[63024]: DEBUG oslo_vmware.api [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Waiting for the task: (returnval){ [ 1746.796321] env[63024]: value = "task-1950893" [ 1746.796321] env[63024]: _type = "Task" [ 1746.796321] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1746.807047] env[63024]: DEBUG oslo_vmware.api [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950893, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1746.853842] env[63024]: DEBUG nova.network.neutron [-] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1746.929773] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.018563] env[63024]: DEBUG oslo_concurrency.lockutils [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.193s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.025244] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 38.157s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.025244] env[63024]: DEBUG nova.objects.instance [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lazy-loading 'resources' on Instance uuid 28b3bfc7-2bed-4941-9f48-8bd301e1a971 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1747.059726] env[63024]: INFO nova.scheduler.client.report [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Deleted allocations for instance 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16 [ 1747.142815] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4572c01a-5db2-407b-9c0d-a62be0f6f2e9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.153627] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d8d581-7938-4353-9670-6465bb9b55f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.191108] env[63024]: DEBUG nova.compute.manager [req-8f0c5322-26b8-4f85-8187-a554660fd285 req-30c7e84a-cd5b-400b-ab45-29d8a43ad428 service nova] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Detach interface failed, port_id=cdff8c9b-4ae6-45d3-8a7b-ef1268f2b42e, reason: Instance 8edc24d6-9073-4836-b14b-422df3ac1b88 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1747.311729] env[63024]: DEBUG oslo_vmware.api [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950893, 'name': PowerOffVM_Task, 'duration_secs': 0.194635} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.311960] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1747.312425] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1747.312425] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9fd61ca-ca95-4761-9bde-949f09f3e4c7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.357023] env[63024]: INFO nova.compute.manager [-] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Took 1.62 seconds to deallocate network for instance. [ 1747.429174] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.574977] env[63024]: DEBUG oslo_concurrency.lockutils [None req-32e59551-f5e0-467a-81ad-4a69835f8cc9 tempest-ServersAdminNegativeTestJSON-560138798 tempest-ServersAdminNegativeTestJSON-560138798-project-member] Lock "2bfcd5e1-b1d9-4829-bea5-d8c460ceec16" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 43.292s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1747.726885] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.727311] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1747.867951] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1747.936436] env[63024]: DEBUG oslo_vmware.api [None 
req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.959418] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1747.959670] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1747.959871] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Deleting the datastore file [datastore1] 9679a1a2-b003-4a60-a812-8b3a9b5f545f {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1747.960142] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1a8c14c6-eca4-4d15-85e6-b59ffd8e71c8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.967724] env[63024]: DEBUG oslo_vmware.api [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Waiting for the task: (returnval){ [ 1747.967724] env[63024]: value = "task-1950895" [ 1747.967724] env[63024]: _type = "Task" [ 1747.967724] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.983770] env[63024]: DEBUG oslo_vmware.api [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950895, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.078568] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd05846-5c5b-4d49-bf0a-e522f0061164 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.087354] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5e3fea-31e1-48ad-876c-42b396e0dad9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.120424] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76454a20-ba55-4616-8f0e-c30c278c3d7d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.128938] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bbab1ca-cb17-4a94-9cba-20cf8366f7cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1748.145283] env[63024]: DEBUG nova.compute.provider_tree [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1748.436680] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.453755] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "81f96b5a-b878-4e6c-9683-00528a4d5650" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1748.454038] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "81f96b5a-b878-4e6c-9683-00528a4d5650" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1748.478875] env[63024]: DEBUG oslo_vmware.api [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Task: {'id': task-1950895, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.402095} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1748.479111] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1748.479292] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1748.479761] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1748.479985] env[63024]: INFO nova.compute.manager [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Took 1.71 seconds to destroy the instance on the hypervisor. [ 1748.480408] env[63024]: DEBUG oslo.service.loopingcall [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1748.480933] env[63024]: DEBUG nova.compute.manager [-] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1748.481054] env[63024]: DEBUG nova.network.neutron [-] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1748.648933] env[63024]: DEBUG nova.scheduler.client.report [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1748.936557] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.159187] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.136s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.165750] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 37.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1749.166492] env[63024]: DEBUG nova.objects.instance [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Lazy-loading 'resources' on Instance uuid 7146277f-2621-4e8f-a14c-49bf4dd052db {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1749.206367] env[63024]: INFO nova.scheduler.client.report [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Deleted allocations for instance 28b3bfc7-2bed-4941-9f48-8bd301e1a971 [ 1749.436150] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.492473] env[63024]: DEBUG nova.compute.manager [req-9cef8874-4c14-4cbe-8a4f-12d5d1326bdb req-6c7016ab-52cd-4dbc-bfa0-096479efb637 service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Received event network-vif-deleted-67b8a4fd-320c-4178-ab08-8bbe9fb878ba {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1749.492633] env[63024]: INFO nova.compute.manager [req-9cef8874-4c14-4cbe-8a4f-12d5d1326bdb req-6c7016ab-52cd-4dbc-bfa0-096479efb637 service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Neutron deleted interface 67b8a4fd-320c-4178-ab08-8bbe9fb878ba; detaching it from the instance and deleting it from the info cache [ 1749.492814] env[63024]: DEBUG nova.network.neutron [req-9cef8874-4c14-4cbe-8a4f-12d5d1326bdb req-6c7016ab-52cd-4dbc-bfa0-096479efb637 service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1749.715578] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c8d52a10-ff02-4350-b082-e89db6eaa53f tempest-InstanceActionsTestJSON-392269979 tempest-InstanceActionsTestJSON-392269979-project-member] Lock "28b3bfc7-2bed-4941-9f48-8bd301e1a971" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.920s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1749.843968] env[63024]: DEBUG nova.network.neutron [-] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1749.943646] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1749.999219] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c7eaf304-71f9-49b2-b2f1-d8817df1a3d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.014016] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b21a0b9f-a481-4ae3-825d-da265984d886 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.057762] env[63024]: DEBUG nova.compute.manager [req-9cef8874-4c14-4cbe-8a4f-12d5d1326bdb req-6c7016ab-52cd-4dbc-bfa0-096479efb637 service nova] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Detach interface failed, port_id=67b8a4fd-320c-4178-ab08-8bbe9fb878ba, reason: Instance 9679a1a2-b003-4a60-a812-8b3a9b5f545f could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1750.284058] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f49903-3d8f-464a-8802-f306102ee859 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.294434] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbecdb78-eb8a-472c-82f4-23dcf71ef4a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.335841] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60b46882-ecdc-41c4-8d7a-e1c1532bee21 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.344972] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46268a5d-1afa-4caa-8dc2-aa7e62bcfe6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.349549] env[63024]: INFO nova.compute.manager [-] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Took 1.87 seconds to deallocate network for instance. [ 1750.364360] env[63024]: DEBUG nova.compute.provider_tree [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1750.437843] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1750.637622] env[63024]: INFO nova.compute.manager [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Rebuilding instance [ 1750.696459] env[63024]: DEBUG nova.compute.manager [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1750.697329] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d1a943e-a3aa-4972-994c-1e806ecb2cde {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1750.867684] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1750.868569] env[63024]: DEBUG nova.scheduler.client.report [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1750.945204] env[63024]: DEBUG oslo_vmware.api [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950890, 'name': ReconfigVM_Task, 'duration_secs': 5.762125} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1750.945204] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1750.945204] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Reconfigured VM to detach interface {{(pid=63024) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1751.375704] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.210s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1751.380426] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 38.460s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1751.382048] env[63024]: INFO nova.compute.claims [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1751.405479] env[63024]: INFO nova.scheduler.client.report [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Deleted allocations for instance 7146277f-2621-4e8f-a14c-49bf4dd052db [ 1751.716137] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1751.716137] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8dc233ff-a65b-443d-af68-806f554078b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1751.724835] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Waiting for the task: (returnval){ [ 1751.724835] env[63024]: value = "task-1950896" [ 1751.724835] env[63024]: _type = "Task" [ 1751.724835] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1751.746876] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950896, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1751.919338] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c7fbc1c-dfd2-4372-8c48-b913ca55d487 tempest-ServersTestManualDisk-843026731 tempest-ServersTestManualDisk-843026731-project-member] Lock "7146277f-2621-4e8f-a14c-49bf4dd052db" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.007s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1752.226549] env[63024]: DEBUG nova.compute.manager [req-6cb43914-faf2-46c8-a767-55adc874329b req-c11ef81b-bfc5-4923-9ae7-13e17efa3f21 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Received event network-vif-deleted-e04e8bec-aaf6-4150-8aa3-16baf5d05b05 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1752.226808] env[63024]: INFO nova.compute.manager [req-6cb43914-faf2-46c8-a767-55adc874329b req-c11ef81b-bfc5-4923-9ae7-13e17efa3f21 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Neutron deleted interface e04e8bec-aaf6-4150-8aa3-16baf5d05b05; detaching it from the instance and deleting it from the info cache [ 1752.227313] env[63024]: DEBUG nova.network.neutron [req-6cb43914-faf2-46c8-a767-55adc874329b req-c11ef81b-bfc5-4923-9ae7-13e17efa3f21 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Updating instance_info_cache with network_info: [{"id": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "address": "fa:16:3e:2f:45:02", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap182496b0-1e", "ovs_interfaceid": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1752.244334] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950896, 'name': PowerOffVM_Task, 'duration_secs': 0.308164} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.244334] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1752.248331] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1752.248331] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b0b6d5df-f1bc-471f-9d9e-c79bb0f260c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.256070] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Waiting for the task: (returnval){ [ 1752.256070] env[63024]: value = "task-1950897" [ 1752.256070] env[63024]: _type = "Task" [ 1752.256070] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.270880] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1752.270880] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1752.271077] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402048', 'volume_id': '1e878e2f-2053-4c16-ad0c-263307073b4e', 'name': 'volume-1e878e2f-2053-4c16-ad0c-263307073b4e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '49eb6292-012a-4296-aff8-9c460866a602', 'attached_at': '', 'detached_at': '', 'volume_id': '1e878e2f-2053-4c16-ad0c-263307073b4e', 'serial': '1e878e2f-2053-4c16-ad0c-263307073b4e'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1752.272213] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1799616-c521-41d2-be5d-e41e9b1f83e7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.299775] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b35167-970c-4eab-9c01-35235b0cefae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.310276] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10730d45-fd45-4aae-bf36-76f1d521c188 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.334938] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a5d9c2-b2c3-4ef9-8643-712a4a50da9b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.360728] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] The volume has not been displaced from its original location: [datastore1] volume-1e878e2f-2053-4c16-ad0c-263307073b4e/volume-1e878e2f-2053-4c16-ad0c-263307073b4e.vmdk. No consolidation needed. 
{{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1752.364315] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Reconfiguring VM instance instance-00000028 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1752.365637] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-286a609c-47d0-48a8-b883-a244ebba6e80 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.386932] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Waiting for the task: (returnval){ [ 1752.386932] env[63024]: value = "task-1950898" [ 1752.386932] env[63024]: _type = "Task" [ 1752.386932] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.401534] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950898, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.572670] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1752.572927] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1752.679177] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5256e-ec6a-adb3-0078-3c05210bcf34/disk-0.vmdk. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1752.680075] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feb36828-f324-4699-9772-b0beda1a5180 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.688957] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5256e-ec6a-adb3-0078-3c05210bcf34/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1752.689155] env[63024]: ERROR oslo_vmware.rw_handles [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5256e-ec6a-adb3-0078-3c05210bcf34/disk-0.vmdk due to incomplete transfer. [ 1752.689443] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1fcf2740-9480-41f1-9437-fb89cd63e37d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.697223] env[63024]: DEBUG oslo_vmware.rw_handles [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52f5256e-ec6a-adb3-0078-3c05210bcf34/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1752.697432] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Uploaded image ee5d780e-10a0-4109-93a4-c4258b879f3d to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1752.700429] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1752.703207] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-35620557-89a1-47ec-a2a2-4140438d71ef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.710222] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1752.710222] env[63024]: value = "task-1950899" [ 1752.710222] env[63024]: _type = "Task" [ 1752.710222] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.719247] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950899, 'name': Destroy_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.731309] env[63024]: DEBUG oslo_concurrency.lockutils [req-6cb43914-faf2-46c8-a767-55adc874329b req-c11ef81b-bfc5-4923-9ae7-13e17efa3f21 service nova] Acquiring lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.731515] env[63024]: DEBUG oslo_concurrency.lockutils [req-6cb43914-faf2-46c8-a767-55adc874329b req-c11ef81b-bfc5-4923-9ae7-13e17efa3f21 service nova] Acquired lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.732433] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1400f464-240f-4e49-a577-5abe4648d7b4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.755889] env[63024]: DEBUG oslo_concurrency.lockutils [req-6cb43914-faf2-46c8-a767-55adc874329b req-c11ef81b-bfc5-4923-9ae7-13e17efa3f21 service nova] Releasing lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1752.755889] env[63024]: WARNING nova.compute.manager [req-6cb43914-faf2-46c8-a767-55adc874329b req-c11ef81b-bfc5-4923-9ae7-13e17efa3f21 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Detach interface failed, port_id=e04e8bec-aaf6-4150-8aa3-16baf5d05b05, reason: No device with interface-id e04e8bec-aaf6-4150-8aa3-16baf5d05b05 exists on VM: nova.exception.NotFound: No device with interface-id e04e8bec-aaf6-4150-8aa3-16baf5d05b05 exists on VM [ 1752.782779] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1752.782978] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1752.783170] env[63024]: DEBUG nova.network.neutron [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1752.900406] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950898, 'name': ReconfigVM_Task, 'duration_secs': 0.195342} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1752.903521] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Reconfigured VM instance instance-00000028 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1752.909324] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f73de01a-c3f1-4c87-ba55-139c226d99d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.924467] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Waiting for the task: (returnval){ [ 1752.924467] env[63024]: value = "task-1950900" [ 1752.924467] env[63024]: _type = "Task" [ 1752.924467] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1752.932572] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950900, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1752.952677] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd160c8b-f28d-49c9-8a36-d7fb4350402c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.961611] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2646e9c-19c2-4f2a-a382-88a143988e8e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1752.993678] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-620684c5-0a3b-43a6-91a7-4329305d16fd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.001940] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdc20eec-0367-4d61-972b-479718fb87b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.022022] env[63024]: DEBUG nova.compute.provider_tree [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1753.219911] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950899, 'name': Destroy_Task} progress is 33%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.434404] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950900, 'name': ReconfigVM_Task, 'duration_secs': 0.253961} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.434724] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402048', 'volume_id': '1e878e2f-2053-4c16-ad0c-263307073b4e', 'name': 'volume-1e878e2f-2053-4c16-ad0c-263307073b4e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '49eb6292-012a-4296-aff8-9c460866a602', 'attached_at': '', 'detached_at': '', 'volume_id': '1e878e2f-2053-4c16-ad0c-263307073b4e', 'serial': '1e878e2f-2053-4c16-ad0c-263307073b4e'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1753.434994] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1753.435804] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8302c6b0-7171-4f7b-a84a-cf2e9169578e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.446257] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1753.446257] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-607fb0df-3d3e-43b6-8437-955a1b2fbb12 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.516395] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1753.516754] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1753.517047] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-770550c8-eb49-4494-9d31-dc393d4d133b 
tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Deleting the datastore file [datastore1] 49eb6292-012a-4296-aff8-9c460866a602 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1753.517346] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8165351-46f2-4238-a989-6098efdb7781 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.525645] env[63024]: DEBUG nova.scheduler.client.report [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1753.531578] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Waiting for the task: (returnval){ [ 1753.531578] env[63024]: value = "task-1950902" [ 1753.531578] env[63024]: _type = "Task" [ 1753.531578] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.547269] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950902, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.614218] env[63024]: DEBUG oslo_concurrency.lockutils [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.614495] env[63024]: DEBUG oslo_concurrency.lockutils [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.615650] env[63024]: DEBUG oslo_concurrency.lockutils [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.615650] env[63024]: DEBUG oslo_concurrency.lockutils [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.615650] env[63024]: DEBUG oslo_concurrency.lockutils [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1753.618762] env[63024]: INFO nova.compute.manager [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Terminating instance [ 1753.675255] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "7cf0ac90-d87d-4644-8a88-da5328d1721d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1753.675501] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "7cf0ac90-d87d-4644-8a88-da5328d1721d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1753.722931] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950899, 'name': Destroy_Task, 'duration_secs': 0.78436} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1753.723196] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Destroyed the VM [ 1753.723427] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1753.723727] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-486f2f77-55a3-4941-b9ee-1cd116b4ba3f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1753.740072] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1753.740072] env[63024]: value = "task-1950903" [ 1753.740072] env[63024]: _type = "Task" [ 1753.740072] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1753.749289] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950903, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1753.797487] env[63024]: DEBUG nova.network.neutron [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Updating instance_info_cache with network_info: [{"id": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "address": "fa:16:3e:2f:45:02", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap182496b0-1e", "ovs_interfaceid": "182496b0-1eb9-4c3a-a2b9-4f3dec86f48c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1754.034898] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.654s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.034898] env[63024]: DEBUG nova.compute.manager [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1754.038656] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.629s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.038912] env[63024]: DEBUG nova.objects.instance [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Lazy-loading 'resources' on Instance uuid 8a826350-0fee-409d-a3fc-260d7d43bdf6 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1754.055059] env[63024]: DEBUG oslo_vmware.api [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Task: {'id': task-1950902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.242372} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.057179] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1754.057179] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1754.057179] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1754.126436] env[63024]: DEBUG nova.compute.manager [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1754.126544] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1754.127374] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb1ea729-486c-4601-9f76-64ffb47bbaba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.147011] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1754.147011] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83472cbb-4712-44a1-997e-c445eef34f37 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.154553] env[63024]: DEBUG oslo_vmware.api [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1754.154553] env[63024]: value = "task-1950904" [ 1754.154553] env[63024]: _type = "Task" [ 1754.154553] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.162398] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Volume detach. Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1754.162722] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ee440d2-4a27-4839-bf1b-c48c5ed47730 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.177766] env[63024]: DEBUG oslo_vmware.api [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950904, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.182039] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce56aaff-a533-49f4-b848-15f5fd19016b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.215831] env[63024]: ERROR nova.compute.manager [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Failed to detach volume 1e878e2f-2053-4c16-ad0c-263307073b4e from /dev/sda: nova.exception.InstanceNotFound: Instance 49eb6292-012a-4296-aff8-9c460866a602 could not be found. 
[ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] Traceback (most recent call last): [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] self.driver.rebuild(**kwargs) [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] raise NotImplementedError() [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] NotImplementedError [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] During handling of the above exception, another exception occurred: [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] Traceback (most recent call last): [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] self.driver.detach_volume(context, old_connection_info, [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] return self._volumeops.detach_volume(connection_info, instance) [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] self._detach_volume_vmdk(connection_info, instance) [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] stable_ref.fetch_moref(session) [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] 
nova.exception.InstanceNotFound: Instance 49eb6292-012a-4296-aff8-9c460866a602 could not be found. [ 1754.215831] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] [ 1754.248534] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950903, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.305704] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-9cf45c3a-2a74-4f8e-8817-47bbd748a44b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1754.457297] env[63024]: DEBUG nova.compute.utils [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Build of instance 49eb6292-012a-4296-aff8-9c460866a602 aborted: Failed to rebuild volume backed instance. {{(pid=63024) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1754.462830] env[63024]: ERROR nova.compute.manager [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 49eb6292-012a-4296-aff8-9c460866a602 aborted: Failed to rebuild volume backed instance. 
[ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] Traceback (most recent call last): [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] self.driver.rebuild(**kwargs) [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] raise NotImplementedError() [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] NotImplementedError [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] During handling of the above exception, another exception occurred: [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] Traceback (most recent call last): [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] self._detach_root_volume(context, instance, root_bdm) [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] with excutils.save_and_reraise_exception(): [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] self.force_reraise() [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] raise self.value [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] self.driver.detach_volume(context, old_connection_info, [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] return self._volumeops.detach_volume(connection_info, instance) [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] self._detach_volume_vmdk(connection_info, instance) [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] stable_ref.fetch_moref(session) [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] raise exception.InstanceNotFound(instance_id=self._uuid) [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] nova.exception.InstanceNotFound: Instance 49eb6292-012a-4296-aff8-9c460866a602 could not be found. [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] During handling of the above exception, another exception occurred: [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] Traceback (most recent call last): [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/compute/manager.py", line 11195, in _error_out_instance_on_exception [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] yield [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 1754.462830] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] self._do_rebuild_instance_with_claim( [ 1754.464616] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 1754.464616] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] self._do_rebuild_instance( [ 1754.464616] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 1754.464616] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] self._rebuild_default_impl(**kwargs) [ 1754.464616] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 1754.464616] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] 
self._rebuild_volume_backed_instance( [ 1754.464616] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 1754.464616] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] raise exception.BuildAbortException( [ 1754.464616] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] nova.exception.BuildAbortException: Build of instance 49eb6292-012a-4296-aff8-9c460866a602 aborted: Failed to rebuild volume backed instance. [ 1754.464616] env[63024]: ERROR nova.compute.manager [instance: 49eb6292-012a-4296-aff8-9c460866a602] [ 1754.543042] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.543348] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1754.552815] env[63024]: DEBUG nova.compute.utils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1754.558249] env[63024]: DEBUG nova.compute.manager [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1754.558439] env[63024]: DEBUG nova.network.neutron [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1754.640097] env[63024]: DEBUG nova.policy [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73669d10734b403e843e73b3c63bef00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12d782556c614caf84a51b37fa43b5de', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1754.666125] env[63024]: DEBUG oslo_vmware.api [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950904, 'name': PowerOffVM_Task, 'duration_secs': 0.254309} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.666440] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1754.666579] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1754.666827] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd8004f2-901e-4f41-ab3f-8a5e79f8b3b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.754309] env[63024]: DEBUG oslo_vmware.api [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1950903, 'name': RemoveSnapshot_Task, 'duration_secs': 0.682554} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1754.754564] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1754.754739] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1754.754914] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Deleting the datastore file [datastore1] 9cf45c3a-2a74-4f8e-8817-47bbd748a44b {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1754.762268] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1754.762392] env[63024]: INFO nova.compute.manager [None req-c781d4bd-5767-4107-89dc-10b704ae11d8 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Took 17.99 seconds to snapshot the instance on the hypervisor. [ 1754.764672] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-959061ce-b2e6-4258-bb7d-5f55522e22c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1754.776031] env[63024]: DEBUG oslo_vmware.api [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1754.776031] env[63024]: value = "task-1950906" [ 1754.776031] env[63024]: _type = "Task" [ 1754.776031] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1754.784680] env[63024]: DEBUG oslo_vmware.api [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950906, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1754.813030] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7396ed-8a86-497d-8bff-3c52f3ae2552 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-9cf45c3a-2a74-4f8e-8817-47bbd748a44b-e04e8bec-aaf6-4150-8aa3-16baf5d05b05" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.505s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1754.869916] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "c12774e4-77d1-4001-8d5d-0240dfed4ead" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1754.870268] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "c12774e4-77d1-4001-8d5d-0240dfed4ead" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.066532] env[63024]: DEBUG nova.compute.manager [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1755.142221] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-520cf600-91c9-4d31-8757-66984292ba92 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.152127] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4853ccf5-5edb-4cdb-8aca-61afa2953337 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.191268] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b80ce76-aa0f-4f99-8243-7b881e7c248c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.199850] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197aac3a-108f-4c79-a067-489d77c8d553 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1755.216548] env[63024]: DEBUG nova.compute.provider_tree [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1755.227658] env[63024]: DEBUG nova.network.neutron [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Successfully created port: 83b7b8fb-a30a-4852-889a-ec6b339cc100 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1755.287333] env[63024]: DEBUG oslo_vmware.api [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1755.742865] env[63024]: ERROR nova.scheduler.client.report [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [req-10232c39-87b1-4388-9635-390ffb3012a2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-10232c39-87b1-4388-9635-390ffb3012a2"}]} [ 1755.764170] env[63024]: DEBUG nova.scheduler.client.report [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1755.788734] env[63024]: DEBUG oslo_vmware.api [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1950906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.957637} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1755.789718] env[63024]: DEBUG nova.scheduler.client.report [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1755.789881] env[63024]: DEBUG nova.compute.provider_tree [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1755.791989] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1755.792531] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1755.792531] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Instance destroyed {{(pid=63024) destroy 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1755.792531] env[63024]: INFO nova.compute.manager [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Took 1.67 seconds to destroy the instance on the hypervisor. [ 1755.794245] env[63024]: DEBUG oslo.service.loopingcall [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1755.794245] env[63024]: DEBUG nova.compute.manager [-] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1755.794245] env[63024]: DEBUG nova.network.neutron [-] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1755.802776] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "9bf1316e-f1ae-426e-a0a2-d814a2460c4d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1755.802776] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "9bf1316e-f1ae-426e-a0a2-d814a2460c4d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1755.806740] env[63024]: DEBUG nova.scheduler.client.report [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1755.831718] env[63024]: DEBUG nova.scheduler.client.report [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1756.086967] env[63024]: DEBUG nova.compute.manager [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1756.127194] env[63024]: DEBUG nova.virt.hardware [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1756.127444] env[63024]: DEBUG nova.virt.hardware [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1756.127599] env[63024]: DEBUG nova.virt.hardware [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1756.127776] env[63024]: DEBUG nova.virt.hardware [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1756.127920] env[63024]: DEBUG nova.virt.hardware [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1756.129255] env[63024]: DEBUG nova.virt.hardware [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1756.129255] env[63024]: DEBUG nova.virt.hardware [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1756.129497] env[63024]: DEBUG nova.virt.hardware [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1756.129708] env[63024]: DEBUG nova.virt.hardware [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1756.129908] env[63024]: DEBUG nova.virt.hardware [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1756.130776] env[63024]: DEBUG nova.virt.hardware [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1756.131285] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2399ecb8-13aa-472e-b771-252da089b188 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.143458] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b9139a4-a518-4332-a596-e6aac0fd6ee3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.404489] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cdb739d-91f7-4b18-9de6-2d7f182eb207 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.413832] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877a6a95-1b1e-47a2-94eb-a130c9c10956 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.450464] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-513ffe17-0392-41d1-bc0b-556adb9f35fd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.458605] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f43fc7c-f36f-4c94-9d67-13563224074b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1756.475613] env[63024]: DEBUG nova.compute.provider_tree [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1756.484296] env[63024]: DEBUG oslo_concurrency.lockutils [None 
req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.604173] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "fe6847e2-a742-4338-983f-698c13aaefde" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1756.604984] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "fe6847e2-a742-4338-983f-698c13aaefde" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1756.634200] env[63024]: DEBUG nova.compute.manager [req-d45b1695-085e-4f35-b170-e05b7a7d01c8 req-fc1df861-a10f-449d-b806-56c2b09c3b00 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Received event network-vif-deleted-182496b0-1eb9-4c3a-a2b9-4f3dec86f48c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1756.634200] env[63024]: INFO nova.compute.manager [req-d45b1695-085e-4f35-b170-e05b7a7d01c8 req-fc1df861-a10f-449d-b806-56c2b09c3b00 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Neutron deleted interface 182496b0-1eb9-4c3a-a2b9-4f3dec86f48c; detaching it from the instance and deleting it from the info cache [ 1756.634200] env[63024]: DEBUG nova.network.neutron [req-d45b1695-085e-4f35-b170-e05b7a7d01c8 req-fc1df861-a10f-449d-b806-56c2b09c3b00 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.002438] env[63024]: ERROR nova.scheduler.client.report [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] [req-a703c52a-8d75-4ddc-8396-4021b684fee8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a703c52a-8d75-4ddc-8396-4021b684fee8"}]} [ 1757.022103] env[63024]: DEBUG nova.scheduler.client.report [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1757.029621] env[63024]: DEBUG nova.network.neutron [-] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1757.038529] env[63024]: DEBUG nova.scheduler.client.report [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1757.038761] env[63024]: DEBUG nova.compute.provider_tree [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1757.053261] env[63024]: DEBUG nova.scheduler.client.report [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1757.075165] env[63024]: DEBUG nova.scheduler.client.report [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1757.136386] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cb4533bc-abff-4da8-88b5-daa6c45486b5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1757.147773] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9d1f910-1b51-431c-8bcb-cdb9c083d5e5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.180629] env[63024]: DEBUG nova.compute.manager [req-d45b1695-085e-4f35-b170-e05b7a7d01c8 req-fc1df861-a10f-449d-b806-56c2b09c3b00 service nova] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Detach interface failed, port_id=182496b0-1eb9-4c3a-a2b9-4f3dec86f48c, reason: Instance 9cf45c3a-2a74-4f8e-8817-47bbd748a44b could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1757.201277] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Acquiring lock "49eb6292-012a-4296-aff8-9c460866a602" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.201551] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Lock "49eb6292-012a-4296-aff8-9c460866a602" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.201770] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Acquiring lock "49eb6292-012a-4296-aff8-9c460866a602-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1757.201955] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Lock "49eb6292-012a-4296-aff8-9c460866a602-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1757.202213] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Lock "49eb6292-012a-4296-aff8-9c460866a602-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1757.205376] env[63024]: INFO nova.compute.manager [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Terminating instance [ 1757.434288] env[63024]: DEBUG nova.network.neutron [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] 
[instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Successfully updated port: 83b7b8fb-a30a-4852-889a-ec6b339cc100 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1757.532288] env[63024]: INFO nova.compute.manager [-] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Took 1.74 seconds to deallocate network for instance. [ 1757.656413] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b732e96d-2254-4388-ac4c-d5493c304861 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.668679] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75ca291-d9bf-4037-9da4-e8892c0966c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.705532] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85f87cd0-d1a3-4f8e-9682-bd1327881a16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.708780] env[63024]: DEBUG nova.compute.manager [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1757.709176] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44450276-431f-4aa6-9091-f7512febdc6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.717340] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ace3388-5d1f-4d98-ae1a-4f4335721cde {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.723980] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd62bcbf-bb6b-4fce-9fb6-c74e46a74814 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.745426] env[63024]: DEBUG nova.compute.provider_tree [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1757.755753] env[63024]: WARNING nova.virt.vmwareapi.driver [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 49eb6292-012a-4296-aff8-9c460866a602 could not be found. 
[ 1757.755995] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1757.756577] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b9e5942-eed5-4985-9218-1e2aba4eaa02 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.764913] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-974c1cd0-122b-4386-ab39-95f0027ee4d2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1757.794816] env[63024]: WARNING nova.virt.vmwareapi.vmops [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 49eb6292-012a-4296-aff8-9c460866a602 could not be found. [ 1757.795063] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1757.795218] env[63024]: INFO nova.compute.manager [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Took 0.09 seconds to destroy the instance on the hypervisor. [ 1757.795457] env[63024]: DEBUG oslo.service.loopingcall [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1757.795677] env[63024]: DEBUG nova.compute.manager [-] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1757.795774] env[63024]: DEBUG nova.network.neutron [-] [instance: 49eb6292-012a-4296-aff8-9c460866a602] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1757.939840] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "refresh_cache-b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1757.940040] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "refresh_cache-b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1757.940183] env[63024]: DEBUG nova.network.neutron [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1758.044991] env[63024]: DEBUG oslo_concurrency.lockutils [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.291527] env[63024]: DEBUG nova.scheduler.client.report [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 89 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1758.291812] env[63024]: DEBUG nova.compute.provider_tree [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 89 to 90 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1758.291992] env[63024]: DEBUG nova.compute.provider_tree [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Updating inventory in ProviderTree for provider 
89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1758.537012] env[63024]: DEBUG nova.network.neutron [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1758.797737] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.759s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.800783] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 39.342s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.802540] env[63024]: INFO nova.compute.claims [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1758.828794] env[63024]: INFO nova.scheduler.client.report [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Deleted allocations for instance 8a826350-0fee-409d-a3fc-260d7d43bdf6 [ 1758.839086] env[63024]: DEBUG nova.compute.manager [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Received event network-vif-plugged-83b7b8fb-a30a-4852-889a-ec6b339cc100 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1758.839307] env[63024]: DEBUG oslo_concurrency.lockutils [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] Acquiring lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1758.839511] env[63024]: DEBUG oslo_concurrency.lockutils [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] Lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1758.839675] env[63024]: DEBUG oslo_concurrency.lockutils 
[req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] Lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1758.839863] env[63024]: DEBUG nova.compute.manager [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] No waiting events found dispatching network-vif-plugged-83b7b8fb-a30a-4852-889a-ec6b339cc100 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1758.840048] env[63024]: WARNING nova.compute.manager [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Received unexpected event network-vif-plugged-83b7b8fb-a30a-4852-889a-ec6b339cc100 for instance with vm_state building and task_state spawning. [ 1758.840210] env[63024]: DEBUG nova.compute.manager [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Received event network-changed-83b7b8fb-a30a-4852-889a-ec6b339cc100 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1758.840359] env[63024]: DEBUG nova.compute.manager [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Refreshing instance network info cache due to event network-changed-83b7b8fb-a30a-4852-889a-ec6b339cc100. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1758.840559] env[63024]: DEBUG oslo_concurrency.lockutils [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] Acquiring lock "refresh_cache-b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1758.974672] env[63024]: DEBUG nova.network.neutron [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Updating instance_info_cache with network_info: [{"id": "83b7b8fb-a30a-4852-889a-ec6b339cc100", "address": "fa:16:3e:cf:79:7d", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b7b8fb-a3", "ovs_interfaceid": "83b7b8fb-a30a-4852-889a-ec6b339cc100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1759.337119] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b35b595e-7403-4b16-8cb9-d6826b017ccc tempest-ServersTestJSON-490274306 tempest-ServersTestJSON-490274306-project-member] Lock "8a826350-0fee-409d-a3fc-260d7d43bdf6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.225s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1759.479241] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "refresh_cache-b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1759.480069] env[63024]: DEBUG nova.compute.manager [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Instance network_info: |[{"id": "83b7b8fb-a30a-4852-889a-ec6b339cc100", "address": "fa:16:3e:cf:79:7d", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b7b8fb-a3", "ovs_interfaceid": "83b7b8fb-a30a-4852-889a-ec6b339cc100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1759.481170] env[63024]: DEBUG oslo_concurrency.lockutils [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] Acquired lock "refresh_cache-b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1759.481369] env[63024]: DEBUG nova.network.neutron [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Refreshing network info cache for port 83b7b8fb-a30a-4852-889a-ec6b339cc100 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1759.482588] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:79:7d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a64108f9-df0a-4feb-bbb5-97f5841c356c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '83b7b8fb-a30a-4852-889a-ec6b339cc100', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1759.492540] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Creating folder: Project (12d782556c614caf84a51b37fa43b5de). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1759.493322] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6ee31a74-391c-4323-88eb-14d1286531bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.508106] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Created folder: Project (12d782556c614caf84a51b37fa43b5de) in parent group-v401959. [ 1759.508106] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Creating folder: Instances. Parent ref: group-v402107. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1759.508106] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58166440-60a2-4c52-bbbe-c4cfc99e7c9b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.515853] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Created folder: Instances in parent group-v402107. [ 1759.518019] env[63024]: DEBUG oslo.service.loopingcall [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1759.518019] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1759.518019] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-94da2dd8-6bd7-4f73-8ee5-31ad664aefa0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1759.543020] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1759.543020] env[63024]: value = "task-1950909" [ 1759.543020] env[63024]: _type = "Task" [ 1759.543020] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1759.549275] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950909, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1759.611297] env[63024]: DEBUG nova.network.neutron [-] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.052109] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950909, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.117978] env[63024]: INFO nova.compute.manager [-] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Took 2.32 seconds to deallocate network for instance. [ 1760.375218] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c418485-b6df-43f1-96a0-0335a8084d44 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.389295] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751ff707-6e46-422a-a75b-c024adbf7c5d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.429163] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b38cf940-fee3-47a0-a60f-d1ffb8c59935 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.437487] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21f4925a-f678-405f-8f4f-02037001ac28 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1760.451082] env[63024]: DEBUG nova.compute.provider_tree [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1760.555895] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950909, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1760.642535] env[63024]: DEBUG nova.network.neutron [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Updated VIF entry in instance network info cache for port 83b7b8fb-a30a-4852-889a-ec6b339cc100. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1760.643071] env[63024]: DEBUG nova.network.neutron [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Updating instance_info_cache with network_info: [{"id": "83b7b8fb-a30a-4852-889a-ec6b339cc100", "address": "fa:16:3e:cf:79:7d", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b7b8fb-a3", "ovs_interfaceid": "83b7b8fb-a30a-4852-889a-ec6b339cc100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1760.687694] env[63024]: INFO nova.compute.manager [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Took 0.57 seconds to detach 1 volumes for instance. [ 1760.689684] env[63024]: DEBUG nova.compute.manager [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Deleting volume: 1e878e2f-2053-4c16-ad0c-263307073b4e {{(pid=63024) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1760.961170] env[63024]: DEBUG nova.scheduler.client.report [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1761.055760] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950909, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.146378] env[63024]: DEBUG oslo_concurrency.lockutils [req-c02c26ad-fbb0-494f-b1ef-4c09a9ecd3b7 req-0594a663-1c62-4372-bad1-dac5110832be service nova] Releasing lock "refresh_cache-b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1761.260170] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.275903] env[63024]: DEBUG nova.compute.manager [req-866eab3b-7db6-4d9c-bc17-47e5a8df3749 req-c9efdb8c-7bfe-4856-ada8-f60f57a473f2 service nova] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Received event network-vif-deleted-2edee58c-e76b-46ad-b4c8-3b2a70467c01 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1761.465201] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.665s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1761.465717] env[63024]: DEBUG nova.compute.manager [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1761.469853] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.837s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.471312] env[63024]: INFO nova.compute.claims [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1761.556050] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950909, 'name': CreateVM_Task, 'duration_secs': 1.542774} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1761.556389] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1761.557159] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1761.557344] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1761.557663] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1761.557916] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09c58a29-c42c-462c-a0ef-37fd260b920e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1761.562690] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1761.562690] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528229f7-37d2-8b53-39bb-1f124adf16bb" [ 1761.562690] env[63024]: _type = "Task" [ 1761.562690] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1761.571389] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528229f7-37d2-8b53-39bb-1f124adf16bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1761.785369] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquiring lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1761.785612] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1761.978039] env[63024]: DEBUG nova.compute.utils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1761.982525] env[63024]: DEBUG nova.compute.manager [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1761.982722] env[63024]: DEBUG nova.network.neutron [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1762.072080] env[63024]: DEBUG nova.policy [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73669d10734b403e843e73b3c63bef00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12d782556c614caf84a51b37fa43b5de', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1762.084052] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528229f7-37d2-8b53-39bb-1f124adf16bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.485317] env[63024]: DEBUG nova.compute.manager [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1762.579552] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528229f7-37d2-8b53-39bb-1f124adf16bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1762.712880] env[63024]: DEBUG nova.network.neutron [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Successfully created port: 8530bff5-1223-4b93-9b55-536f7665048a {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1763.019798] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e035a5-c36f-4b6c-bb79-c07329ef3f6d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.028309] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a214c29a-22c7-4185-83ed-38ad8530d8d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.061319] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2381244b-a642-4ac0-8dee-cac0d48d2def {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.068809] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42a5d802-e966-4a76-a72a-a1b5b8bb5e2b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.081590] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528229f7-37d2-8b53-39bb-1f124adf16bb, 'name': SearchDatastore_Task, 'duration_secs': 1.158634} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.090350] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1763.090651] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1763.090887] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1763.091045] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1763.091230] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1763.092031] env[63024]: DEBUG nova.compute.provider_tree [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1763.095681] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4691c5e8-d5d4-4330-b6ec-b9d30d269813 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.104332] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1763.104519] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1763.105306] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18d2f385-2498-497f-bde3-512d284231de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.113288] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1763.113288] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a7b8c7-31ed-f058-7c89-0d1c7d6f383b" [ 1763.113288] env[63024]: _type = "Task" [ 1763.113288] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.121454] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a7b8c7-31ed-f058-7c89-0d1c7d6f383b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1763.499884] env[63024]: DEBUG nova.compute.manager [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1763.525303] env[63024]: DEBUG nova.virt.hardware [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1763.525551] env[63024]: DEBUG nova.virt.hardware [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1763.525707] env[63024]: DEBUG nova.virt.hardware [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1763.525884] env[63024]: DEBUG nova.virt.hardware [None 
req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1763.526033] env[63024]: DEBUG nova.virt.hardware [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1763.526182] env[63024]: DEBUG nova.virt.hardware [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1763.526388] env[63024]: DEBUG nova.virt.hardware [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1763.526543] env[63024]: DEBUG nova.virt.hardware [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1763.526704] env[63024]: DEBUG nova.virt.hardware [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1763.526859] env[63024]: DEBUG nova.virt.hardware [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1763.527035] env[63024]: DEBUG nova.virt.hardware [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1763.527862] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27788577-0f76-4ebc-aa44-a66bb40703ed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.535751] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1d036c-14dd-4075-ad4a-879390f0cad1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.597206] env[63024]: DEBUG nova.scheduler.client.report [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Inventory has not changed for 
provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1763.624639] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a7b8c7-31ed-f058-7c89-0d1c7d6f383b, 'name': SearchDatastore_Task, 'duration_secs': 0.01193} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1763.625456] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8166f3a-62d5-4939-9421-4b171a4a3203 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1763.630626] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1763.630626] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524ec6e7-1592-4b06-3050-61f963bf7357" [ 1763.630626] env[63024]: _type = "Task" [ 1763.630626] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1763.638558] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524ec6e7-1592-4b06-3050-61f963bf7357, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.102510] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.633s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.103636] env[63024]: DEBUG nova.compute.manager [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Start building networks asynchronously for instance. 
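The inventory payload reported just above for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b is the scheduler's capacity view of this cluster node. As a rough reading (the arithmetic below is illustrative, not Nova code), placement treats the usable capacity of each resource class as (total - reserved) * allocation_ratio, while max_unit caps what any single allocation may request:

```python
# Reading the inventory reported for provider 89dfa68a-...-86051f9fb96b.
# Illustrative only; the values are copied from the log entry above.
inventory = {
    "VCPU": {"total": 48, "reserved": 0, "allocation_ratio": 4.0, "max_unit": 16},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0,
                  "max_unit": 65530},
    "DISK_GB": {"total": 400, "reserved": 0, "allocation_ratio": 1.0,
                "max_unit": 170},
}

for rc, inv in inventory.items():
    # Usable capacity as placement computes it; max_unit limits one allocation.
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: capacity={capacity:g}, per-allocation cap={inv['max_unit']}")
# VCPU: capacity=192, per-allocation cap=16
# MEMORY_MB: capacity=196078, per-allocation cap=65530
# DISK_GB: capacity=400, per-allocation cap=170
```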
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1764.105979] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 41.403s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.141688] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524ec6e7-1592-4b06-3050-61f963bf7357, 'name': SearchDatastore_Task, 'duration_secs': 0.009684} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1764.141961] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1764.142237] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b588ea21-dea0-4ee6-8f9e-12007d0a1ce1/b588ea21-dea0-4ee6-8f9e-12007d0a1ce1.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1764.142496] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8166e6e1-4853-4708-82c3-10008d305159 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1764.150282] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1764.150282] env[63024]: value = "task-1950911" [ 1764.150282] env[63024]: _type = "Task" [ 1764.150282] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1764.160555] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950911, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.249498] env[63024]: DEBUG nova.compute.manager [req-c82734f5-cb91-4063-9eb9-8078755bada9 req-a09493bf-d193-4732-b992-df52d0c85a6d service nova] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Received event network-vif-plugged-8530bff5-1223-4b93-9b55-536f7665048a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1764.249721] env[63024]: DEBUG oslo_concurrency.lockutils [req-c82734f5-cb91-4063-9eb9-8078755bada9 req-a09493bf-d193-4732-b992-df52d0c85a6d service nova] Acquiring lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1764.249927] env[63024]: DEBUG oslo_concurrency.lockutils [req-c82734f5-cb91-4063-9eb9-8078755bada9 req-a09493bf-d193-4732-b992-df52d0c85a6d service nova] Lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1764.250107] env[63024]: DEBUG oslo_concurrency.lockutils [req-c82734f5-cb91-4063-9eb9-8078755bada9 req-a09493bf-d193-4732-b992-df52d0c85a6d service nova] Lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1764.250274] env[63024]: DEBUG nova.compute.manager [req-c82734f5-cb91-4063-9eb9-8078755bada9 req-a09493bf-d193-4732-b992-df52d0c85a6d service nova] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] No waiting events found dispatching network-vif-plugged-8530bff5-1223-4b93-9b55-536f7665048a {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1764.250437] env[63024]: WARNING nova.compute.manager [req-c82734f5-cb91-4063-9eb9-8078755bada9 req-a09493bf-d193-4732-b992-df52d0c85a6d service nova] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Received unexpected event network-vif-plugged-8530bff5-1223-4b93-9b55-536f7665048a for instance with vm_state building and task_state spawning. 
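The WARNING that closes the block above just records that the network-vif-plugged event for port 8530bff5-1223-4b93-9b55-536f7665048a arrived before any waiter had registered for it; while the instance is still in vm_state building / task_state spawning this is typically benign. The lock bookkeeping around it ("Acquiring lock", "acquired ... waited", "released ... held") comes from oslo.concurrency's lockutils helpers, used here for the per-instance "<uuid>-events" lock and elsewhere for coarser locks such as "compute_resources". A minimal sketch of the two forms, assuming oslo.concurrency is installed and using illustrative names rather than Nova's own functions:

```python
# Minimal sketch of the oslo.concurrency named-lock helpers that emit the
# acquire/held/release DEBUG lines seen in the log. Lock names and functions
# here are illustrative, not Nova's implementation.
from oslo_concurrency import lockutils


@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid):
    # Runs with the in-process "compute_resources" semaphore held; lockutils
    # logs how long the caller waited for it and how long it was held.
    return f"claimed for {instance_uuid}"


def pop_instance_event(instance_uuid, event_name):
    # Context-manager form, mirroring the per-instance "<uuid>-events" lock
    # taken around InstanceEvents.pop_instance_event above.
    with lockutils.lock(f"{instance_uuid}-events"):
        return f"popped {event_name}"


if __name__ == "__main__":
    uuid = "31a693b6-293a-4f01-9baf-a9e7e8d453d4"
    print(claim_resources(uuid))
    print(pop_instance_event(uuid, "network-vif-plugged"))
```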
[ 1764.367827] env[63024]: DEBUG nova.network.neutron [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Successfully updated port: 8530bff5-1223-4b93-9b55-536f7665048a {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1764.611561] env[63024]: INFO nova.compute.claims [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1764.620026] env[63024]: DEBUG nova.compute.utils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1764.620026] env[63024]: DEBUG nova.compute.manager [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1764.620026] env[63024]: DEBUG nova.network.neutron [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1764.663818] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950911, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1764.697520] env[63024]: DEBUG nova.policy [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e952ee9800ae4a42ba61a59f4f4937de', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e38e52320899407c98092b4ea5964195', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1764.877276] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "refresh_cache-31a693b6-293a-4f01-9baf-a9e7e8d453d4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1764.877465] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "refresh_cache-31a693b6-293a-4f01-9baf-a9e7e8d453d4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1764.877595] env[63024]: DEBUG nova.network.neutron [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1765.119789] env[63024]: INFO nova.compute.resource_tracker [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating resource usage from migration cb60c03c-4816-43a9-a522-d9e511f6ee40 [ 1765.124512] env[63024]: DEBUG nova.compute.manager [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1765.127748] env[63024]: DEBUG nova.network.neutron [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Successfully created port: cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1765.165554] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950911, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522314} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.165843] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b588ea21-dea0-4ee6-8f9e-12007d0a1ce1/b588ea21-dea0-4ee6-8f9e-12007d0a1ce1.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1765.166130] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1765.166432] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5157f9fa-0f36-45ce-b4fe-5018a7719075 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.176361] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1765.176361] env[63024]: value = "task-1950912" [ 1765.176361] env[63024]: _type = "Task" [ 1765.176361] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.188202] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950912, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.434031] env[63024]: DEBUG nova.network.neutron [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1765.686967] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950912, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.224315} completed successfully. 
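The CopyVirtualDisk_Task / ExtendVirtualDisk_Task pair above is the root-disk half of the spawn: the cached sparse VMDK is copied into the instance folder and then grown to the flavor's root size. The driver expresses that size in KiB, which is where the 1048576 in the "Extending root virtual disk" entry comes from (a 1 GiB root disk); a trivial check, with a made-up helper name:

```python
# The "Extending root virtual disk to 1048576" entry above is the flavor's
# root disk size in KiB: 1 GiB * 1024 * 1024. Helper name is illustrative.
def root_gb_to_kb(root_gb: int) -> int:
    return root_gb * 1024 * 1024


assert root_gb_to_kb(1) == 1048576  # matches the ExtendVirtualDisk_Task request
```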
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1765.688188] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1765.689446] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ce9718-4d22-421e-8977-29032ffba197 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.696082] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0175912-8d80-4aef-804e-2a4aed21907b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.720614] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] b588ea21-dea0-4ee6-8f9e-12007d0a1ce1/b588ea21-dea0-4ee6-8f9e-12007d0a1ce1.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1765.723766] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e16eddcc-2f30-44d2-97ab-67b1a84fa847 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.744920] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa2d2b1-e688-43b9-9912-d16e59df2e21 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.780647] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852d5ed2-651c-467a-9214-d3644c69e171 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.783610] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1765.783610] env[63024]: value = "task-1950913" [ 1765.783610] env[63024]: _type = "Task" [ 1765.783610] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1765.790504] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da8ff993-1f98-4c02-8fbc-28ce12061e81 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1765.797652] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950913, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1765.807259] env[63024]: DEBUG nova.compute.provider_tree [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1765.927504] env[63024]: DEBUG nova.network.neutron [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Updating instance_info_cache with network_info: [{"id": "8530bff5-1223-4b93-9b55-536f7665048a", "address": "fa:16:3e:3c:2c:8c", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8530bff5-12", "ovs_interfaceid": "8530bff5-1223-4b93-9b55-536f7665048a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1766.143169] env[63024]: DEBUG nova.compute.manager [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Start spawning the instance on the hypervisor. 
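The instance_info_cache update above carries the full VIF description for port 8530bff5-1223-4b93-9b55-536f7665048a. The fields that usually matter when debugging connectivity (MAC, fixed IP, MTU, bridge) sit at predictable places in that structure; the snippet below operates on a hand-trimmed excerpt of the logged entry, not the complete blob:

```python
# Hand-trimmed excerpt of the VIF entry logged above; only the fields used
# below are kept, with values copied from the log.
vif = {
    "id": "8530bff5-1223-4b93-9b55-536f7665048a",
    "address": "fa:16:3e:3c:2c:8c",
    "devname": "tap8530bff5-12",
    "network": {
        "bridge": "br-int",
        "subnets": [
            {"cidr": "192.168.128.0/28",
             "ips": [{"address": "192.168.128.11", "type": "fixed"}]},
        ],
        "meta": {"mtu": 8950},
    },
}

fixed_ips = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"] if ip["type"] == "fixed"]
print(vif["address"], fixed_ips, vif["network"]["meta"]["mtu"])
# fa:16:3e:3c:2c:8c ['192.168.128.11'] 8950
```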
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1766.173395] env[63024]: DEBUG nova.virt.hardware [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1766.174932] env[63024]: DEBUG nova.virt.hardware [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1766.175268] env[63024]: DEBUG nova.virt.hardware [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1766.175953] env[63024]: DEBUG nova.virt.hardware [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1766.179198] env[63024]: DEBUG nova.virt.hardware [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1766.179198] env[63024]: DEBUG nova.virt.hardware [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1766.179198] env[63024]: DEBUG nova.virt.hardware [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1766.179198] env[63024]: DEBUG nova.virt.hardware [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1766.179198] env[63024]: DEBUG nova.virt.hardware [None 
req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1766.179198] env[63024]: DEBUG nova.virt.hardware [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1766.179198] env[63024]: DEBUG nova.virt.hardware [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1766.179198] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99a491d-c44a-4c24-af42-daf9a92f55f9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.189419] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef726642-9522-458f-935b-8a0ee653f1d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.297018] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950913, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.310687] env[63024]: DEBUG nova.scheduler.client.report [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1766.429771] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "refresh_cache-31a693b6-293a-4f01-9baf-a9e7e8d453d4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1766.430143] env[63024]: DEBUG nova.compute.manager [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Instance network_info: |[{"id": "8530bff5-1223-4b93-9b55-536f7665048a", "address": "fa:16:3e:3c:2c:8c", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8530bff5-12", "ovs_interfaceid": "8530bff5-1223-4b93-9b55-536f7665048a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1766.430635] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:2c:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a64108f9-df0a-4feb-bbb5-97f5841c356c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8530bff5-1223-4b93-9b55-536f7665048a', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1766.439821] env[63024]: DEBUG oslo.service.loopingcall [None 
req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1766.440093] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1766.440345] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7d8d14a-68fc-435d-8c9f-155c09ed1601 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.463928] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1766.463928] env[63024]: value = "task-1950914" [ 1766.463928] env[63024]: _type = "Task" [ 1766.463928] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.470208] env[63024]: DEBUG nova.compute.manager [req-6bb08577-6d4e-47dc-aa36-21b5e2693ae3 req-e4a45973-7387-48bd-aa57-f892070a4d2b service nova] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Received event network-changed-8530bff5-1223-4b93-9b55-536f7665048a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1766.470208] env[63024]: DEBUG nova.compute.manager [req-6bb08577-6d4e-47dc-aa36-21b5e2693ae3 req-e4a45973-7387-48bd-aa57-f892070a4d2b service nova] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Refreshing instance network info cache due to event network-changed-8530bff5-1223-4b93-9b55-536f7665048a. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1766.470383] env[63024]: DEBUG oslo_concurrency.lockutils [req-6bb08577-6d4e-47dc-aa36-21b5e2693ae3 req-e4a45973-7387-48bd-aa57-f892070a4d2b service nova] Acquiring lock "refresh_cache-31a693b6-293a-4f01-9baf-a9e7e8d453d4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.470546] env[63024]: DEBUG oslo_concurrency.lockutils [req-6bb08577-6d4e-47dc-aa36-21b5e2693ae3 req-e4a45973-7387-48bd-aa57-f892070a4d2b service nova] Acquired lock "refresh_cache-31a693b6-293a-4f01-9baf-a9e7e8d453d4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.470706] env[63024]: DEBUG nova.network.neutron [req-6bb08577-6d4e-47dc-aa36-21b5e2693ae3 req-e4a45973-7387-48bd-aa57-f892070a4d2b service nova] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Refreshing network info cache for port 8530bff5-1223-4b93-9b55-536f7665048a {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1766.478712] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950914, 'name': CreateVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.794958] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950913, 'name': ReconfigVM_Task, 'duration_secs': 0.702583} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.795743] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Reconfigured VM instance instance-0000002f to attach disk [datastore1] b588ea21-dea0-4ee6-8f9e-12007d0a1ce1/b588ea21-dea0-4ee6-8f9e-12007d0a1ce1.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1766.796390] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4facf98-07df-4969-aad3-b34557665fbb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.802853] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1766.802853] env[63024]: value = "task-1950915" [ 1766.802853] env[63024]: _type = "Task" [ 1766.802853] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.811956] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950915, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1766.818457] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.710s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1766.818457] env[63024]: INFO nova.compute.manager [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Migrating [ 1766.825408] env[63024]: DEBUG oslo_concurrency.lockutils [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.129s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1766.827585] env[63024]: INFO nova.compute.claims [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1766.976184] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950914, 'name': CreateVM_Task, 'duration_secs': 0.309654} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1766.976184] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1766.978141] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1766.978141] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1766.978141] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1766.978141] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f34cd254-3e33-4b98-9911-eacfbf1fe1bc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1766.983023] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1766.983023] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522e0b73-eafd-087c-6628-6eca12207d57" [ 1766.983023] env[63024]: _type = "Task" [ 1766.983023] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1766.993644] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522e0b73-eafd-087c-6628-6eca12207d57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.313529] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950915, 'name': Rename_Task, 'duration_secs': 0.137551} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.313808] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1767.318563] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-73084123-c34e-49e8-8d8d-486a523038fe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.331388] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1767.331388] env[63024]: value = "task-1950916" [ 1767.331388] env[63024]: _type = "Task" [ 1767.331388] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.339863] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.340014] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.340247] env[63024]: DEBUG nova.network.neutron [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1767.341454] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950916, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.408915] env[63024]: DEBUG nova.network.neutron [req-6bb08577-6d4e-47dc-aa36-21b5e2693ae3 req-e4a45973-7387-48bd-aa57-f892070a4d2b service nova] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Updated VIF entry in instance network info cache for port 8530bff5-1223-4b93-9b55-536f7665048a. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1767.409834] env[63024]: DEBUG nova.network.neutron [req-6bb08577-6d4e-47dc-aa36-21b5e2693ae3 req-e4a45973-7387-48bd-aa57-f892070a4d2b service nova] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Updating instance_info_cache with network_info: [{"id": "8530bff5-1223-4b93-9b55-536f7665048a", "address": "fa:16:3e:3c:2c:8c", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8530bff5-12", "ovs_interfaceid": "8530bff5-1223-4b93-9b55-536f7665048a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1767.489769] env[63024]: DEBUG nova.network.neutron [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Successfully updated port: cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1767.497215] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522e0b73-eafd-087c-6628-6eca12207d57, 'name': SearchDatastore_Task, 'duration_secs': 0.015992} completed successfully. 
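
The instance_info_cache payload logged above is a list of VIF dictionaries. A short sketch of reading the commonly used fields out of that structure; the literal below keeps only the keys actually accessed, with values copied from the cached entry for port 8530bff5-1223-4b93-9b55-536f7665048a:

    # Trimmed copy of the cached VIF entry shown in the log line above.
    network_info = [{
        "id": "8530bff5-1223-4b93-9b55-536f7665048a",
        "address": "fa:16:3e:3c:2c:8c",
        "devname": "tap8530bff5-12",
        "network": {
            "meta": {"mtu": 8950},
            "subnets": [{"ips": [{"address": "192.168.128.11"}]}],
        },
    }]

    for vif in network_info:
        fixed_ips = [ip["address"]
                     for subnet in vif["network"]["subnets"]
                     for ip in subnet["ips"]]
        print(vif["id"], vif["address"], vif["devname"],
              vif["network"]["meta"]["mtu"], fixed_ips)
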
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1767.497588] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.498017] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1767.498106] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.498259] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.498641] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1767.499074] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5d38216-f521-41b1-9130-b88014160a08 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.507659] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1767.507823] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1767.508564] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-874e58a7-c1a5-42fe-bb14-253db91c212f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1767.514220] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1767.514220] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526777fb-be77-7e8d-8b26-e621a1ce7ce9" [ 1767.514220] env[63024]: _type = "Task" [ 1767.514220] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1767.522338] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526777fb-be77-7e8d-8b26-e621a1ce7ce9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.759090] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1767.759345] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1767.841432] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950916, 'name': PowerOnVM_Task} progress is 66%. 
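
Everything in this stretch is addressed by datastore path, i.e. strings of the form "[<datastore>] <folder>/<file>", as in the devstack-image-cache_base directory and the cached vmdk above. An illustrative local helper (not the Nova or oslo.vmware implementation) that composes the same strings:

    # Local stand-in for datastore path handling; the names are copied from the log.
    def ds_path(datastore, *parts):
        return '[%s] %s' % (datastore, '/'.join(parts))

    image_id = '2646ca61-612e-4bc3-97f7-ee492c048835'
    print(ds_path('datastore1', 'devstack-image-cache_base'))
    print(ds_path('datastore1', 'devstack-image-cache_base',
                  image_id, image_id + '.vmdk'))
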
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1767.911906] env[63024]: DEBUG oslo_concurrency.lockutils [req-6bb08577-6d4e-47dc-aa36-21b5e2693ae3 req-e4a45973-7387-48bd-aa57-f892070a4d2b service nova] Releasing lock "refresh_cache-31a693b6-293a-4f01-9baf-a9e7e8d453d4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1767.991445] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Acquiring lock "refresh_cache-5c2efe96-4ac4-4693-9203-43407d768f66" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1767.991583] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Acquired lock "refresh_cache-5c2efe96-4ac4-4693-9203-43407d768f66" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1767.991897] env[63024]: DEBUG nova.network.neutron [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1768.026499] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526777fb-be77-7e8d-8b26-e621a1ce7ce9, 'name': SearchDatastore_Task, 'duration_secs': 0.008105} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.027308] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b674e0dc-7ede-4cc8-b452-6b98e58eb886 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.035471] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1768.035471] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5200d154-e7ca-3fbe-b5f5-34c227c16080" [ 1768.035471] env[63024]: _type = "Task" [ 1768.035471] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.045601] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5200d154-e7ca-3fbe-b5f5-34c227c16080, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.081275] env[63024]: DEBUG nova.network.neutron [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance_info_cache with network_info: [{"id": "241606ef-afe1-4ca8-912c-dae7639e4941", "address": "fa:16:3e:fc:f3:01", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241606ef-af", "ovs_interfaceid": "241606ef-afe1-4ca8-912c-dae7639e4941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.277667] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb3ca30-0027-4c9b-a73e-ee065c139f6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.287015] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a740c4-8640-4e9d-82e4-4d3074735914 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.317132] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9d2c6bb-2ffa-442a-8454-866f9ebd16de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.324743] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67103f3b-1f37-457a-9855-b89eb4a3f3b3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.339667] env[63024]: DEBUG nova.compute.provider_tree [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1768.348051] env[63024]: DEBUG oslo_vmware.api [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950916, 'name': PowerOnVM_Task, 'duration_secs': 0.717019} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.348814] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1768.349041] env[63024]: INFO nova.compute.manager [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Took 12.26 seconds to spawn the instance on the hypervisor. [ 1768.349238] env[63024]: DEBUG nova.compute.manager [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1768.349958] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5780dbd6-6b49-42e7-97d2-f91b8e27550f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.496999] env[63024]: DEBUG nova.compute.manager [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Received event network-vif-plugged-cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1768.497243] env[63024]: DEBUG oslo_concurrency.lockutils [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] Acquiring lock "5c2efe96-4ac4-4693-9203-43407d768f66-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1768.497456] env[63024]: DEBUG oslo_concurrency.lockutils [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] Lock "5c2efe96-4ac4-4693-9203-43407d768f66-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1768.497618] env[63024]: DEBUG oslo_concurrency.lockutils [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] Lock "5c2efe96-4ac4-4693-9203-43407d768f66-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1768.497784] env[63024]: DEBUG nova.compute.manager [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] No waiting events found dispatching network-vif-plugged-cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1768.497943] env[63024]: WARNING nova.compute.manager [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] [instance: 
5c2efe96-4ac4-4693-9203-43407d768f66] Received unexpected event network-vif-plugged-cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5 for instance with vm_state building and task_state spawning. [ 1768.498113] env[63024]: DEBUG nova.compute.manager [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Received event network-changed-cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1768.498264] env[63024]: DEBUG nova.compute.manager [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Refreshing instance network info cache due to event network-changed-cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1768.498440] env[63024]: DEBUG oslo_concurrency.lockutils [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] Acquiring lock "refresh_cache-5c2efe96-4ac4-4693-9203-43407d768f66" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1768.545533] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5200d154-e7ca-3fbe-b5f5-34c227c16080, 'name': SearchDatastore_Task, 'duration_secs': 0.009977} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1768.546272] env[63024]: DEBUG nova.network.neutron [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Instance cache missing network info. 
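
The WARNING above is the normal "unexpected event" path: Neutron reported that port cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5 was plugged, but nothing in this compute process had registered a waiter for that (instance, event) pair yet, so the event is logged and dropped. A rough, hypothetical illustration of that dispatch logic (not Nova's actual implementation):

    import threading

    # (instance_uuid, event_name) -> threading.Event registered by whichever code
    # will later block waiting for the notification.
    waiting_events = {}

    def pop_instance_event(instance_uuid, event_name):
        waiter = waiting_events.pop((instance_uuid, event_name), None)
        if waiter is None:
            # Corresponds to "Received unexpected event ... for instance with
            # vm_state building and task_state spawning."
            print('WARNING: unexpected event %s for %s' % (event_name, instance_uuid))
            return
        waiter.set()  # wake the thread blocked on this event

    pop_instance_event('5c2efe96-4ac4-4693-9203-43407d768f66',
                       'network-vif-plugged-cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5')
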
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1768.548047] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.548300] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 31a693b6-293a-4f01-9baf-a9e7e8d453d4/31a693b6-293a-4f01-9baf-a9e7e8d453d4.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1768.548644] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8409971f-86c4-40d8-92d0-eee9f819ae6e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1768.555909] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1768.555909] env[63024]: value = "task-1950917" [ 1768.555909] env[63024]: _type = "Task" [ 1768.555909] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1768.571734] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950917, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1768.585460] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1768.792322] env[63024]: DEBUG nova.network.neutron [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Updating instance_info_cache with network_info: [{"id": "cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5", "address": "fa:16:3e:c7:b1:ba", "network": {"id": "35e43372-6a66-46c6-90ec-04f1ad6bdab5", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1684682161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e38e52320899407c98092b4ea5964195", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4f91f31-0516-4d62-a341-e03a50b7c477", "external-id": "nsx-vlan-transportzone-963", "segmentation_id": 963, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc0bbdd4-4d", "ovs_interfaceid": "cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1768.845175] env[63024]: DEBUG nova.scheduler.client.report [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1768.869108] env[63024]: INFO nova.compute.manager [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Took 55.97 seconds to build instance. [ 1769.066055] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950917, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.469716} completed successfully. 
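
The inventory dict above is what the resource tracker reports to Placement for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b. The capacity the scheduler can consume for each resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation (16 VCPU, 65530 MB, 170 GB here). A worked check of the logged values:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        print(rc, (inv['total'] - inv['reserved']) * inv['allocation_ratio'])
    # -> VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
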
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.066214] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 31a693b6-293a-4f01-9baf-a9e7e8d453d4/31a693b6-293a-4f01-9baf-a9e7e8d453d4.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1769.066404] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1769.066648] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-141d3a74-0b4f-49c5-b43d-e42186658156 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.072948] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1769.072948] env[63024]: value = "task-1950918" [ 1769.072948] env[63024]: _type = "Task" [ 1769.072948] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.080611] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950918, 'name': ExtendVirtualDisk_Task} progress is 0%. 
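
After the cached image is copied into the instance directory, the root disk is grown to the flavor size. The figure in "Extending root virtual disk to 1048576" is, by the vmwareapi driver's convention (stated here as an assumption), a size in KB, which would make this a 1 GiB root disk:

    root_gb = 1
    print(root_gb * 1024 * 1024)    # 1048576, the value in the log line
    print(1048576 / (1024 * 1024))  # 1.0 (GiB)
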
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.300028] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Releasing lock "refresh_cache-5c2efe96-4ac4-4693-9203-43407d768f66" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1769.300028] env[63024]: DEBUG nova.compute.manager [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Instance network_info: |[{"id": "cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5", "address": "fa:16:3e:c7:b1:ba", "network": {"id": "35e43372-6a66-46c6-90ec-04f1ad6bdab5", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1684682161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e38e52320899407c98092b4ea5964195", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4f91f31-0516-4d62-a341-e03a50b7c477", "external-id": "nsx-vlan-transportzone-963", "segmentation_id": 963, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc0bbdd4-4d", "ovs_interfaceid": "cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1769.300028] env[63024]: DEBUG oslo_concurrency.lockutils [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] Acquired lock "refresh_cache-5c2efe96-4ac4-4693-9203-43407d768f66" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.300028] env[63024]: DEBUG nova.network.neutron [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Refreshing network info cache for port cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1769.301155] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:b1:ba', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a4f91f31-0516-4d62-a341-e03a50b7c477', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1769.309774] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 
tempest-ImagesNegativeTestJSON-293137996-project-member] Creating folder: Project (e38e52320899407c98092b4ea5964195). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1769.312776] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-78c05001-33eb-4a8b-88b7-5ee8b0ad06e7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.324306] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Created folder: Project (e38e52320899407c98092b4ea5964195) in parent group-v401959. [ 1769.324622] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Creating folder: Instances. Parent ref: group-v402111. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1769.324836] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-393f38a1-45ab-4142-9937-1f5c7581327e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.336630] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Created folder: Instances in parent group-v402111. [ 1769.336921] env[63024]: DEBUG oslo.service.loopingcall [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1769.337182] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1769.337437] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11fe8c62-f288-4b53-85ec-0e8a0cfc56d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.355919] env[63024]: DEBUG oslo_concurrency.lockutils [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.531s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.356701] env[63024]: DEBUG nova.compute.manager [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Start building networks asynchronously for instance. 
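
The two Folder.CreateFolder calls above build the per-project hierarchy in vCenter before the VM itself is created: a "Project (<tenant_id>)" folder under the OpenStack root folder (group-v401959), then an "Instances" folder beneath it (group-v402111), which is where CreateVM_Task runs. A sketch of the naming only, with the tenant id copied from the log:

    tenant_id = 'e38e52320899407c98092b4ea5964195'
    # group-v401959
    #   Project (e38e52320899407c98092b4ea5964195)   <- first CreateFolder
    #       Instances                                <- second CreateFolder; CreateVM_Task target
    print('Project (%s)/Instances' % tenant_id)
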
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1769.359487] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.337s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1769.360983] env[63024]: INFO nova.compute.claims [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1769.368938] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1769.368938] env[63024]: value = "task-1950921" [ 1769.368938] env[63024]: _type = "Task" [ 1769.368938] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.373031] env[63024]: DEBUG oslo_concurrency.lockutils [None req-105265c9-cf8d-473b-a981-c2f6430845ea tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.552s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1769.378243] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950921, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.579370] env[63024]: DEBUG nova.network.neutron [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Updated VIF entry in instance network info cache for port cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1769.579747] env[63024]: DEBUG nova.network.neutron [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Updating instance_info_cache with network_info: [{"id": "cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5", "address": "fa:16:3e:c7:b1:ba", "network": {"id": "35e43372-6a66-46c6-90ec-04f1ad6bdab5", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1684682161-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e38e52320899407c98092b4ea5964195", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a4f91f31-0516-4d62-a341-e03a50b7c477", "external-id": "nsx-vlan-transportzone-963", "segmentation_id": 963, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcc0bbdd4-4d", "ovs_interfaceid": "cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1769.586479] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950918, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066576} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.586810] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1769.587930] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d732b7-1049-4a98-8276-010f6a66ad5e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.613803] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 31a693b6-293a-4f01-9baf-a9e7e8d453d4/31a693b6-293a-4f01-9baf-a9e7e8d453d4.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1769.616297] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c5c85ac-4c98-4742-8891-f085fe9d3174 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.631374] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ecd01eb-7198-444c-a2e1-a5f1f0ffb2db {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.650147] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance '650a97b9-911e-44b0-9e82-a6d4cc95c9dd' progress to 0 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1769.655073] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1769.655073] env[63024]: value = "task-1950922" [ 1769.655073] env[63024]: _type = "Task" [ 1769.655073] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.663576] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950922, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.867964] env[63024]: DEBUG nova.compute.utils [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1769.869502] env[63024]: DEBUG nova.compute.manager [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1769.869690] env[63024]: DEBUG nova.network.neutron [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1769.879032] env[63024]: DEBUG nova.compute.manager [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1769.885066] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950921, 'name': CreateVM_Task, 'duration_secs': 0.351279} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1769.885442] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1769.886168] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1769.886333] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1769.886676] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1769.886937] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95be1e9e-453d-4b32-bd35-795383bb696e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1769.891895] env[63024]: DEBUG oslo_vmware.api [None 
req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Waiting for the task: (returnval){ [ 1769.891895] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dd754c-c844-2154-2bbf-b23175ce237a" [ 1769.891895] env[63024]: _type = "Task" [ 1769.891895] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1769.906123] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dd754c-c844-2154-2bbf-b23175ce237a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1769.940130] env[63024]: DEBUG nova.policy [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bda8b151c879486cb72bff214eb4fcbd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6a85ae55ce0e4f829944ed37204fc98a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1770.083172] env[63024]: DEBUG oslo_concurrency.lockutils [req-29fb50fa-03d3-4ae5-9a63-6af0b23889ef req-e136f9aa-63db-419c-baeb-45293bb5c79f service nova] Releasing lock "refresh_cache-5c2efe96-4ac4-4693-9203-43407d768f66" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1770.157473] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1770.157849] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d5c6e106-8b1c-4f52-af3a-cda5d6c671f8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.168853] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950922, 'name': ReconfigVM_Task, 'duration_secs': 0.255606} completed successfully. 
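
The "Policy check for network:attach_external_network failed" line is an oslo.policy authorization: the token carries only the member and reader roles (is_admin is False), so the admin-oriented rule denies it, which simply means this user is not treated as allowed to attach ports to external networks. A minimal sketch of the same kind of check; the 'role:admin' check string is an assumption for illustration, not Nova's exact default:

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': '6a85ae55ce0e4f829944ed37204fc98a'}
    print(enforcer.authorize('network:attach_external_network', {}, creds,
                             do_raise=False))  # False for a member/reader token
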
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.173018] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 31a693b6-293a-4f01-9baf-a9e7e8d453d4/31a693b6-293a-4f01-9baf-a9e7e8d453d4.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1770.173018] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1770.173018] env[63024]: value = "task-1950923" [ 1770.173018] env[63024]: _type = "Task" [ 1770.173018] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.173018] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5ff3229-95da-4ea5-b606-4cadf93ca1ef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.181481] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950923, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.182855] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1770.182855] env[63024]: value = "task-1950924" [ 1770.182855] env[63024]: _type = "Task" [ 1770.182855] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.192047] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950924, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.374786] env[63024]: DEBUG nova.compute.manager [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1770.401881] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dd754c-c844-2154-2bbf-b23175ce237a, 'name': SearchDatastore_Task, 'duration_secs': 0.008467} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.404863] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1770.405173] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1770.405425] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1770.405588] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1770.405767] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1770.406308] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-644d3fdb-d2d6-436a-b176-b293a9ecebc7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.409106] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1770.415714] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1770.415926] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1770.416864] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e87a3bca-83ab-4490-b778-1479ec19d2f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.426399] env[63024]: DEBUG nova.network.neutron [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Successfully created port: 2c1caf53-f9b6-4184-b807-b496dcae4cbb {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1770.429901] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Waiting for the task: (returnval){ [ 1770.429901] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b02dc6-eb6a-d048-76b6-2c4d5023ef4f" [ 1770.429901] env[63024]: _type = "Task" [ 1770.429901] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.443857] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b02dc6-eb6a-d048-76b6-2c4d5023ef4f, 'name': SearchDatastore_Task, 'duration_secs': 0.008809} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.444698] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd00af7e-9925-4bea-9db1-e384cbb9eef6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.451822] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Waiting for the task: (returnval){ [ 1770.451822] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5276281b-fa07-db07-2bc9-3bf356e98a06" [ 1770.451822] env[63024]: _type = "Task" [ 1770.451822] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.461022] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5276281b-fa07-db07-2bc9-3bf356e98a06, 'name': SearchDatastore_Task, 'duration_secs': 0.008379} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.463716] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1770.463981] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 5c2efe96-4ac4-4693-9203-43407d768f66/5c2efe96-4ac4-4693-9203-43407d768f66.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1770.464498] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8dd0c7e3-8b99-434e-aee4-651667173dd8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.471553] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Waiting for the task: (returnval){ [ 1770.471553] env[63024]: value = "task-1950925" [ 1770.471553] env[63024]: _type = "Task" [ 1770.471553] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.482279] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950925, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.687176] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950923, 'name': PowerOffVM_Task, 'duration_secs': 0.201901} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.691697] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1770.692664] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance '650a97b9-911e-44b0-9e82-a6d4cc95c9dd' progress to 17 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1770.705419] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950924, 'name': Rename_Task, 'duration_secs': 0.172037} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.705419] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1770.705419] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f37edc36-0866-4db8-afe5-9037bf7680c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.711933] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1770.711933] env[63024]: value = "task-1950926" [ 1770.711933] env[63024]: _type = "Task" [ 1770.711933] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1770.723422] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950926, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1770.882097] env[63024]: INFO nova.virt.block_device [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Booting with volume 51f1feea-0afc-4d43-8b47-c2e3f20d424c at /dev/sda [ 1770.936098] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-138634bd-9713-4aa2-873a-a927e479f3a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.944496] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8b381ce-37de-47f0-9006-18143aff5c59 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.988817] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97443318-caa5-410e-aeaf-ea042ead3784 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1770.996526] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950925, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467633} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1770.997618] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 5c2efe96-4ac4-4693-9203-43407d768f66/5c2efe96-4ac4-4693-9203-43407d768f66.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1770.997837] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1770.998139] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d18752df-28ab-4247-9dfd-19f93b629365 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.002814] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ed67a8-e0b1-4d7d-bffd-42fb21f8dfed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.021428] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Waiting for the task: (returnval){ [ 1771.021428] env[63024]: value = "task-1950927" [ 1771.021428] env[63024]: _type = "Task" [ 1771.021428] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.029632] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950927, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.042690] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1d7d976-7ee4-4607-a493-b345ff30e82e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.048895] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6524f6ce-86ac-4959-99ba-653dacc946d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.065443] env[63024]: DEBUG nova.virt.block_device [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Updating existing volume attachment record: 42bbb9f1-07f2-482d-9f2e-d69f77ff6d55 {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1771.069287] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03cc7a00-f5d2-4b63-9f22-adcb2e0634ed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.106816] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ec526e-48c0-49b5-9536-8133a04193e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.108865] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ffad9a-64b2-4fd7-8222-f585879a394c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.118946] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e95f34-65aa-47c7-b04e-4315fc1d0689 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.129958] env[63024]: DEBUG nova.compute.provider_tree [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1771.209691] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1771.210056] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1771.210895] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1771.216685] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1771.216685] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1771.216685] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1771.216685] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1771.216685] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1771.216685] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1771.216685] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1771.216685] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Sorted desired topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1771.230014] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0758ff0a-b87f-4ff4-ad3a-f619f543b117 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.272637] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950926, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.276160] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1771.276160] env[63024]: value = "task-1950928" [ 1771.276160] env[63024]: _type = "Task" [ 1771.276160] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.285585] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950928, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.531463] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950927, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081026} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.531941] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1771.532844] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf5241d5-fcfc-4442-a7a1-147d698dd6b5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.556515] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Reconfiguring VM instance instance-00000031 to attach disk [datastore1] 5c2efe96-4ac4-4693-9203-43407d768f66/5c2efe96-4ac4-4693-9203-43407d768f66.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1771.557950] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67447c4c-e35a-4cf4-a7ed-fda798733d28 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.580495] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Waiting for the task: (returnval){ [ 1771.580495] env[63024]: value = "task-1950929" [ 1771.580495] env[63024]: _type = "Task" [ 1771.580495] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1771.589383] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950929, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1771.633626] env[63024]: DEBUG nova.scheduler.client.report [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1771.736871] env[63024]: DEBUG oslo_vmware.api [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950926, 'name': PowerOnVM_Task, 'duration_secs': 0.581361} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.737196] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1771.737421] env[63024]: INFO nova.compute.manager [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Took 8.24 seconds to spawn the instance on the hypervisor. [ 1771.737634] env[63024]: DEBUG nova.compute.manager [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1771.738446] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c662faa5-430e-4b45-932b-49f6c3d1e76c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1771.788614] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950928, 'name': ReconfigVM_Task, 'duration_secs': 0.344615} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1771.790310] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance '650a97b9-911e-44b0-9e82-a6d4cc95c9dd' progress to 33 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1772.094790] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950929, 'name': ReconfigVM_Task, 'duration_secs': 0.306481} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.094790] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Reconfigured VM instance instance-00000031 to attach disk [datastore1] 5c2efe96-4ac4-4693-9203-43407d768f66/5c2efe96-4ac4-4693-9203-43407d768f66.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1772.094790] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e21c1d65-0925-44fc-83b9-84a21c696026 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.101020] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Waiting for the task: (returnval){ [ 1772.101020] env[63024]: value = "task-1950930" [ 1772.101020] env[63024]: _type = "Task" [ 1772.101020] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.107812] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950930, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.142589] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.780s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.142589] env[63024]: DEBUG nova.compute.manager [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1772.144385] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.321s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.150477] env[63024]: DEBUG nova.objects.instance [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lazy-loading 'resources' on Instance uuid f6fddc23-ad36-4d6f-82a2-ded456b2596e {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1772.262039] env[63024]: INFO nova.compute.manager [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Took 52.83 seconds to build instance. [ 1772.298135] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T11:08:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='270e2a66-632e-41fa-bb7b-06506c9e6093',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-717992172',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1772.298374] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1772.298571] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1772.298770] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1772.298912] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1772.299066] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1772.299269] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1772.300017] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1772.300017] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1772.300017] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1772.300380] env[63024]: DEBUG nova.virt.hardware [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1772.306363] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Reconfiguring VM instance instance-00000027 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1772.306840] env[63024]: DEBUG oslo_concurrency.lockutils [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.307117] env[63024]: DEBUG oslo_concurrency.lockutils [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.308021] env[63024]: INFO nova.compute.manager [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Rebooting instance [ 1772.309527] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with 
opID=oslo.vmware-e41719a2-8be5-4c26-a1c2-343cb73ed70f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.346957] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1772.346957] env[63024]: value = "task-1950931" [ 1772.346957] env[63024]: _type = "Task" [ 1772.346957] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.359358] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950931, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.362095] env[63024]: DEBUG oslo_concurrency.lockutils [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1772.362272] env[63024]: DEBUG oslo_concurrency.lockutils [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1772.362438] env[63024]: DEBUG nova.network.neutron [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1772.546185] env[63024]: DEBUG nova.network.neutron [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Successfully updated port: 2c1caf53-f9b6-4184-b807-b496dcae4cbb {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1772.558586] env[63024]: DEBUG nova.compute.manager [req-ce479521-7a7e-4b93-8812-ecbbe4c97338 req-96890647-ee5e-47eb-a562-e3740ab4aa01 service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Received event network-vif-plugged-2c1caf53-f9b6-4184-b807-b496dcae4cbb {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1772.558796] env[63024]: DEBUG oslo_concurrency.lockutils [req-ce479521-7a7e-4b93-8812-ecbbe4c97338 req-96890647-ee5e-47eb-a562-e3740ab4aa01 service nova] Acquiring lock "601a003d-811c-4698-b0b6-054482d32c21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1772.559370] env[63024]: DEBUG oslo_concurrency.lockutils [req-ce479521-7a7e-4b93-8812-ecbbe4c97338 req-96890647-ee5e-47eb-a562-e3740ab4aa01 service nova] Lock "601a003d-811c-4698-b0b6-054482d32c21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s 
{{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1772.559552] env[63024]: DEBUG oslo_concurrency.lockutils [req-ce479521-7a7e-4b93-8812-ecbbe4c97338 req-96890647-ee5e-47eb-a562-e3740ab4aa01 service nova] Lock "601a003d-811c-4698-b0b6-054482d32c21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.560189] env[63024]: DEBUG nova.compute.manager [req-ce479521-7a7e-4b93-8812-ecbbe4c97338 req-96890647-ee5e-47eb-a562-e3740ab4aa01 service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] No waiting events found dispatching network-vif-plugged-2c1caf53-f9b6-4184-b807-b496dcae4cbb {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1772.560189] env[63024]: WARNING nova.compute.manager [req-ce479521-7a7e-4b93-8812-ecbbe4c97338 req-96890647-ee5e-47eb-a562-e3740ab4aa01 service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Received unexpected event network-vif-plugged-2c1caf53-f9b6-4184-b807-b496dcae4cbb for instance with vm_state building and task_state block_device_mapping. [ 1772.610710] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950930, 'name': Rename_Task, 'duration_secs': 0.135996} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1772.610991] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1772.611249] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5cfe9444-b90a-49fb-91a0-f1b397f554f2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1772.617211] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Waiting for the task: (returnval){ [ 1772.617211] env[63024]: value = "task-1950932" [ 1772.617211] env[63024]: _type = "Task" [ 1772.617211] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1772.624348] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950932, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1772.651696] env[63024]: DEBUG nova.compute.utils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1772.656429] env[63024]: DEBUG nova.compute.manager [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1772.656429] env[63024]: DEBUG nova.network.neutron [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1772.711441] env[63024]: DEBUG nova.policy [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2cc094a0a6b444ab1880fcfb1de4e8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bbfeec6d47746328f185acd132e0d5a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1772.767634] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8537d8cc-7773-4200-bad7-7aa8edd15224 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.133s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1772.862816] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950931, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.026671] env[63024]: INFO nova.compute.manager [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Rescuing [ 1773.026925] env[63024]: DEBUG oslo_concurrency.lockutils [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "refresh_cache-31a693b6-293a-4f01-9baf-a9e7e8d453d4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1773.027121] env[63024]: DEBUG oslo_concurrency.lockutils [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "refresh_cache-31a693b6-293a-4f01-9baf-a9e7e8d453d4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.027243] env[63024]: DEBUG nova.network.neutron [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1773.048045] env[63024]: DEBUG oslo_concurrency.lockutils [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Acquiring lock "refresh_cache-601a003d-811c-4698-b0b6-054482d32c21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1773.048961] env[63024]: DEBUG oslo_concurrency.lockutils [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Acquired lock "refresh_cache-601a003d-811c-4698-b0b6-054482d32c21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1773.048961] env[63024]: DEBUG nova.network.neutron [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1773.129598] env[63024]: DEBUG oslo_vmware.api [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950932, 'name': PowerOnVM_Task, 'duration_secs': 0.451815} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.130191] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1773.130393] env[63024]: INFO nova.compute.manager [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Took 6.99 seconds to spawn the instance on the hypervisor. [ 1773.130588] env[63024]: DEBUG nova.compute.manager [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1773.131456] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b74d954-afd7-4011-b3c3-30195d25147c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.156541] env[63024]: DEBUG nova.compute.manager [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1773.166798] env[63024]: DEBUG nova.compute.manager [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1773.167336] env[63024]: DEBUG nova.virt.hardware [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1773.167541] env[63024]: DEBUG nova.virt.hardware [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1773.167970] env[63024]: DEBUG nova.virt.hardware [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1773.168186] env[63024]: DEBUG nova.virt.hardware [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1773.169046] env[63024]: DEBUG nova.virt.hardware [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1773.169046] env[63024]: DEBUG nova.virt.hardware [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1773.169046] env[63024]: DEBUG nova.virt.hardware [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1773.169046] env[63024]: DEBUG nova.virt.hardware [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1773.169276] env[63024]: DEBUG nova.virt.hardware [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 
tempest-ServersTestBootFromVolume-2044198158-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1773.169460] env[63024]: DEBUG nova.virt.hardware [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1773.169669] env[63024]: DEBUG nova.virt.hardware [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1773.173340] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a074be2c-9510-4a25-b2e4-3ad75bf4d2b5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.186317] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b61b7635-ca27-4e4b-9fa3-200a71cf2f5d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.211752] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7352f8be-25bb-419f-9336-8a669ee9bcbe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.219508] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbea925-d73a-4fea-a45a-355fedfcdd41 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.257620] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50e98078-e3a9-4833-afa2-47b94dee78eb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.265533] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85346aac-eab7-46f0-965c-efe6d8e90e7b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.271408] env[63024]: DEBUG nova.compute.manager [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1773.281766] env[63024]: DEBUG nova.compute.provider_tree [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1773.338639] env[63024]: DEBUG nova.network.neutron [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Successfully created port: ca53b77e-33c4-4a60-b3aa-bc92763eb98e {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1773.358946] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950931, 'name': ReconfigVM_Task, 'duration_secs': 0.519456} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1773.359231] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Reconfigured VM instance instance-00000027 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1773.360023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea3557c-c777-4004-b472-1402395134a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.382477] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 650a97b9-911e-44b0-9e82-a6d4cc95c9dd/650a97b9-911e-44b0-9e82-a6d4cc95c9dd.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1773.382845] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a649f22f-3a82-4076-bc4a-5747adf440a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.402355] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1773.402355] env[63024]: value = "task-1950933" [ 1773.402355] env[63024]: _type = "Task" [ 1773.402355] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1773.410368] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950933, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.420689] env[63024]: DEBUG nova.network.neutron [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance_info_cache with network_info: [{"id": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "address": "fa:16:3e:2b:cc:65", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e0e9732-b3", "ovs_interfaceid": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.604090] env[63024]: DEBUG nova.network.neutron [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1773.656493] env[63024]: INFO nova.compute.manager [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Took 54.04 seconds to build instance. 
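The entries around this point repeat two mechanisms: oslo.concurrency lock bookkeeping ("Acquiring lock ..." / "acquired ... waited Ns" / "released ... held Ns") and oslo.vmware task polling ("Waiting for the task ..." / "progress is N%" / "completed successfully"). The short sketch below illustrates such a poll-until-done loop; it is illustrative only, not oslo.vmware's implementation, and the names TaskInfo and fetch_task_info are hypothetical stand-ins for the vCenter task lookup.

# Illustrative sketch only -- not oslo.vmware code. It mimics the pattern the
# log shows: poll a task handle at a fixed interval, report progress, and stop
# when the task finishes. TaskInfo and fetch_task_info are hypothetical.
import time
from dataclasses import dataclass

@dataclass
class TaskInfo:
    task_id: str
    state: str        # "running", "success", or "error"
    progress: int     # 0-100

def fetch_task_info(task_id: str, tick: dict) -> TaskInfo:
    # Hypothetical stand-in for retrieving task state from the server;
    # here it just advances a fake task by 25% per poll.
    tick["progress"] = min(100, tick["progress"] + 25)
    state = "success" if tick["progress"] >= 100 else "running"
    return TaskInfo(task_id, state, tick["progress"])

def wait_for_task(task_id: str, interval: float = 0.5, timeout: float = 60.0) -> TaskInfo:
    """Poll a task until it completes, reporting progress like the entries above."""
    deadline = time.monotonic() + timeout
    tick = {"progress": 0}
    while True:
        info = fetch_task_info(task_id, tick)
        if info.state == "success":
            print(f"Task {info.task_id} completed successfully.")
            return info
        if info.state == "error":
            raise RuntimeError(f"Task {info.task_id} failed")
        print(f"Task {info.task_id} progress is {info.progress}%.")
        if time.monotonic() > deadline:
            raise TimeoutError(f"Task {info.task_id} did not finish in {timeout}s")
        time.sleep(interval)

if __name__ == "__main__":
    wait_for_task("task-1950933")

In the excerpt itself, the analogous loop appears to live in oslo_vmware/api.py (wait_for_task and _poll_task, per the {{...}} location tags), which emits the progress and completion lines seen for the ReconfigVM_Task and CreateVM_Task entries nearby.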
[ 1773.782043] env[63024]: DEBUG nova.network.neutron [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Updating instance_info_cache with network_info: [{"id": "2c1caf53-f9b6-4184-b807-b496dcae4cbb", "address": "fa:16:3e:9c:2b:a9", "network": {"id": "6b6b3f2c-8c90-447a-9e4d-a4aff0ad5260", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1251599304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6a85ae55ce0e4f829944ed37204fc98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c1caf53-f9", "ovs_interfaceid": "2c1caf53-f9b6-4184-b807-b496dcae4cbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1773.792084] env[63024]: DEBUG nova.scheduler.client.report [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1773.814354] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1773.918514] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950933, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1773.925098] env[63024]: DEBUG oslo_concurrency.lockutils [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1773.927889] env[63024]: DEBUG nova.compute.manager [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1773.928803] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0bff0ba-bdd0-42ef-a393-60805df063b2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1773.986400] env[63024]: DEBUG nova.network.neutron [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Updating instance_info_cache with network_info: [{"id": "8530bff5-1223-4b93-9b55-536f7665048a", "address": "fa:16:3e:3c:2c:8c", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8530bff5-12", "ovs_interfaceid": "8530bff5-1223-4b93-9b55-536f7665048a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1774.160501] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5b7fc944-4bf9-4c7d-b515-e5a25af97e3c tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Lock "5c2efe96-4ac4-4693-9203-43407d768f66" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.018s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.164517] env[63024]: DEBUG nova.compute.manager [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1774.190018] env[63024]: DEBUG nova.virt.hardware [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1774.190285] env[63024]: DEBUG nova.virt.hardware [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1774.190443] env[63024]: DEBUG nova.virt.hardware [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1774.190651] env[63024]: DEBUG nova.virt.hardware [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1774.190840] env[63024]: DEBUG nova.virt.hardware [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1774.190947] env[63024]: DEBUG nova.virt.hardware [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1774.191179] env[63024]: DEBUG nova.virt.hardware [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1774.191338] env[63024]: DEBUG nova.virt.hardware [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1774.191503] env[63024]: DEBUG 
nova.virt.hardware [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1774.191685] env[63024]: DEBUG nova.virt.hardware [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1774.191865] env[63024]: DEBUG nova.virt.hardware [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1774.193044] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e14a7e-e283-4edb-a086-7a64308fb9a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.200791] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a11802-f6b4-4cdf-8f6c-eae619180067 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.287553] env[63024]: DEBUG oslo_concurrency.lockutils [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Releasing lock "refresh_cache-601a003d-811c-4698-b0b6-054482d32c21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.287951] env[63024]: DEBUG nova.compute.manager [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Instance network_info: |[{"id": "2c1caf53-f9b6-4184-b807-b496dcae4cbb", "address": "fa:16:3e:9c:2b:a9", "network": {"id": "6b6b3f2c-8c90-447a-9e4d-a4aff0ad5260", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1251599304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6a85ae55ce0e4f829944ed37204fc98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c1caf53-f9", "ovs_interfaceid": "2c1caf53-f9b6-4184-b807-b496dcae4cbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1774.288391] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None 
req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:2b:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3a80436-f7a9-431a-acec-aca3d76e3f9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2c1caf53-f9b6-4184-b807-b496dcae4cbb', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1774.295777] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Creating folder: Project (6a85ae55ce0e4f829944ed37204fc98a). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1774.296079] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb9e3b78-e0ff-4785-a7cb-f78d61316ebb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.301955] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.158s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.304900] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 42.196s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.306380] env[63024]: INFO nova.compute.claims [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1774.312065] env[63024]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 1774.312237] env[63024]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=63024) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 1774.312542] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Folder already exists: Project (6a85ae55ce0e4f829944ed37204fc98a). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 1774.312732] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Creating folder: Instances. Parent ref: group-v402077. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1774.312975] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4d19a1e8-dc24-4a20-a534-1e0aaadb6249 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.323325] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Created folder: Instances in parent group-v402077. [ 1774.323325] env[63024]: DEBUG oslo.service.loopingcall [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1774.324152] env[63024]: INFO nova.scheduler.client.report [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Deleted allocations for instance f6fddc23-ad36-4d6f-82a2-ded456b2596e [ 1774.325698] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1774.328581] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf588d79-20b4-4a58-8b55-7b5bd09cb14b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.353611] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1774.353611] env[63024]: value = "task-1950936" [ 1774.353611] env[63024]: _type = "Task" [ 1774.353611] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.363022] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950936, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.413624] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950933, 'name': ReconfigVM_Task, 'duration_secs': 0.779193} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1774.414098] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 650a97b9-911e-44b0-9e82-a6d4cc95c9dd/650a97b9-911e-44b0-9e82-a6d4cc95c9dd.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1774.414512] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance '650a97b9-911e-44b0-9e82-a6d4cc95c9dd' progress to 50 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1774.489467] env[63024]: DEBUG oslo_concurrency.lockutils [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "refresh_cache-31a693b6-293a-4f01-9baf-a9e7e8d453d4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1774.610909] env[63024]: DEBUG nova.compute.manager [req-3f1d751b-0abd-4d66-b879-f830f5074917 req-746c61be-f9d5-424f-8b3d-bcaf6d2fbf64 service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Received event network-changed-2c1caf53-f9b6-4184-b807-b496dcae4cbb {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1774.611117] env[63024]: DEBUG nova.compute.manager [req-3f1d751b-0abd-4d66-b879-f830f5074917 req-746c61be-f9d5-424f-8b3d-bcaf6d2fbf64 service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Refreshing instance network info cache due to event network-changed-2c1caf53-f9b6-4184-b807-b496dcae4cbb. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1774.611330] env[63024]: DEBUG oslo_concurrency.lockutils [req-3f1d751b-0abd-4d66-b879-f830f5074917 req-746c61be-f9d5-424f-8b3d-bcaf6d2fbf64 service nova] Acquiring lock "refresh_cache-601a003d-811c-4698-b0b6-054482d32c21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.611473] env[63024]: DEBUG oslo_concurrency.lockutils [req-3f1d751b-0abd-4d66-b879-f830f5074917 req-746c61be-f9d5-424f-8b3d-bcaf6d2fbf64 service nova] Acquired lock "refresh_cache-601a003d-811c-4698-b0b6-054482d32c21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.611627] env[63024]: DEBUG nova.network.neutron [req-3f1d751b-0abd-4d66-b879-f830f5074917 req-746c61be-f9d5-424f-8b3d-bcaf6d2fbf64 service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Refreshing network info cache for port 2c1caf53-f9b6-4184-b807-b496dcae4cbb {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1774.626896] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Acquiring lock "5c2efe96-4ac4-4693-9203-43407d768f66" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.627153] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Lock "5c2efe96-4ac4-4693-9203-43407d768f66" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.627365] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Acquiring lock "5c2efe96-4ac4-4693-9203-43407d768f66-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1774.627548] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Lock "5c2efe96-4ac4-4693-9203-43407d768f66-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.627717] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Lock "5c2efe96-4ac4-4693-9203-43407d768f66-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.630196] env[63024]: INFO nova.compute.manager [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 
tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Terminating instance [ 1774.663734] env[63024]: DEBUG nova.compute.manager [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1774.811757] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.507s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.812889] env[63024]: DEBUG nova.compute.utils [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Instance e2086b87-ae9c-4968-a847-ac91e5345ec8 could not be found. {{(pid=63024) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1774.814255] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.498s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1774.816025] env[63024]: INFO nova.compute.claims [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1774.818546] env[63024]: DEBUG nova.compute.manager [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Instance disappeared during build. 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2520}} [ 1774.818546] env[63024]: DEBUG nova.compute.manager [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Unplugging VIFs for instance {{(pid=63024) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 1774.818671] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "refresh_cache-e2086b87-ae9c-4968-a847-ac91e5345ec8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1774.818813] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "refresh_cache-e2086b87-ae9c-4968-a847-ac91e5345ec8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1774.818968] env[63024]: DEBUG nova.network.neutron [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1774.850383] env[63024]: DEBUG oslo_concurrency.lockutils [None req-35968ba6-c291-4e9a-b4f7-344d2af5b605 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "f6fddc23-ad36-4d6f-82a2-ded456b2596e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.765s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1774.865191] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950936, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1774.923182] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f042a92-4b02-4ee3-8efb-6c9f6b44edb7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.942995] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db62351c-88d0-4328-b702-728406964368 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.946914] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c39b9b62-3741-4f5d-82a7-837c53f9c059 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.965273] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance '650a97b9-911e-44b0-9e82-a6d4cc95c9dd' progress to 67 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1774.973137] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Doing hard reboot of VM {{(pid=63024) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1774.973137] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-dac3e1ab-c0f4-438c-9aea-d08e47b02d93 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1774.979738] env[63024]: DEBUG oslo_vmware.api [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1774.979738] env[63024]: value = "task-1950937" [ 1774.979738] env[63024]: _type = "Task" [ 1774.979738] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1774.987754] env[63024]: DEBUG oslo_vmware.api [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950937, 'name': ResetVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.134544] env[63024]: DEBUG nova.compute.manager [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1775.135179] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1775.136736] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a80eb4-30b0-42a5-a80c-c025d83a4df6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.145521] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1775.145789] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0246069-1c2e-4558-be78-116561f7908e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.153137] env[63024]: DEBUG oslo_vmware.api [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Waiting for the task: (returnval){ [ 1775.153137] env[63024]: value = "task-1950938" [ 1775.153137] env[63024]: _type = "Task" [ 1775.153137] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.167666] env[63024]: DEBUG oslo_vmware.api [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950938, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.189387] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1775.325355] env[63024]: DEBUG nova.compute.utils [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Can not refresh info_cache because instance was not found {{(pid=63024) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1024}} [ 1775.353195] env[63024]: DEBUG nova.network.neutron [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1775.374842] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950936, 'name': CreateVM_Task, 'duration_secs': 0.753777} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.375210] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1775.376701] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'attachment_id': '42bbb9f1-07f2-482d-9f2e-d69f77ff6d55', 'boot_index': 0, 'delete_on_termination': True, 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402080', 'volume_id': '51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'name': 'volume-51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '601a003d-811c-4698-b0b6-054482d32c21', 'attached_at': '', 'detached_at': '', 'volume_id': '51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'serial': '51f1feea-0afc-4d43-8b47-c2e3f20d424c'}, 'device_type': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=63024) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1775.376998] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Root volume attach. Driver type: vmdk {{(pid=63024) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1775.378161] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a2ce9bc-9f94-422f-acb7-02ca5987010b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.389668] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bafd5898-d89e-4c1a-a862-e4d99f91934c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.397433] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136d4eb4-5211-42b1-9916-4ccb0f458160 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.404138] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-ef6be9df-9255-489a-83d9-1ad069ba74c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.412047] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Waiting for the task: (returnval){ [ 1775.412047] env[63024]: value = "task-1950939" [ 1775.412047] env[63024]: _type = "Task" [ 1775.412047] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.424373] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1950939, 'name': RelocateVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.495054] env[63024]: DEBUG oslo_vmware.api [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1950937, 'name': ResetVM_Task, 'duration_secs': 0.095304} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.495054] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Did hard reboot of VM {{(pid=63024) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1775.495054] env[63024]: DEBUG nova.compute.manager [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1775.496185] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-676fc2ac-310e-4a85-902c-7e95434cf91d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.501887] env[63024]: DEBUG nova.network.neutron [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Successfully updated port: ca53b77e-33c4-4a60-b3aa-bc92763eb98e {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1775.520296] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1775.522026] env[63024]: DEBUG nova.network.neutron [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.523040] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57d9fa20-2b66-4b23-89b3-81ff8fd739fd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.531834] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1775.531834] env[63024]: value = "task-1950940" [ 1775.531834] env[63024]: _type = 
"Task" [ 1775.531834] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.541131] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950940, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.606466] env[63024]: DEBUG nova.network.neutron [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Port 241606ef-afe1-4ca8-912c-dae7639e4941 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1775.669375] env[63024]: DEBUG oslo_vmware.api [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950938, 'name': PowerOffVM_Task, 'duration_secs': 0.22725} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.669670] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1775.669901] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1775.670466] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c57a535-a2b6-45c5-8e8c-506e32d989bb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.682492] env[63024]: DEBUG nova.network.neutron [req-3f1d751b-0abd-4d66-b879-f830f5074917 req-746c61be-f9d5-424f-8b3d-bcaf6d2fbf64 service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Updated VIF entry in instance network info cache for port 2c1caf53-f9b6-4184-b807-b496dcae4cbb. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1775.682913] env[63024]: DEBUG nova.network.neutron [req-3f1d751b-0abd-4d66-b879-f830f5074917 req-746c61be-f9d5-424f-8b3d-bcaf6d2fbf64 service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Updating instance_info_cache with network_info: [{"id": "2c1caf53-f9b6-4184-b807-b496dcae4cbb", "address": "fa:16:3e:9c:2b:a9", "network": {"id": "6b6b3f2c-8c90-447a-9e4d-a4aff0ad5260", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1251599304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6a85ae55ce0e4f829944ed37204fc98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c1caf53-f9", "ovs_interfaceid": "2c1caf53-f9b6-4184-b807-b496dcae4cbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1775.748627] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1775.748940] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1775.749141] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Deleting the datastore file [datastore1] 5c2efe96-4ac4-4693-9203-43407d768f66 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1775.749403] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ac6860bc-be1a-4137-bbbd-1d16abb3733c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.757420] env[63024]: DEBUG oslo_vmware.api [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Waiting for the task: (returnval){ [ 1775.757420] env[63024]: value = "task-1950942" [ 1775.757420] env[63024]: _type = "Task" [ 1775.757420] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1775.767510] env[63024]: DEBUG oslo_vmware.api [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950942, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1775.925360] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1950939, 'name': RelocateVM_Task, 'duration_secs': 0.443653} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1775.928033] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Volume attach. Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1775.928238] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402080', 'volume_id': '51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'name': 'volume-51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '601a003d-811c-4698-b0b6-054482d32c21', 'attached_at': '', 'detached_at': '', 'volume_id': '51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'serial': '51f1feea-0afc-4d43-8b47-c2e3f20d424c'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1775.929896] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2ee84e0-082f-4c86-a11c-02c883cb92d2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.950377] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec874531-e6de-48ab-95a2-0048efd75f40 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.973620] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Reconfiguring VM instance instance-00000032 to attach disk [datastore1] volume-51f1feea-0afc-4d43-8b47-c2e3f20d424c/volume-51f1feea-0afc-4d43-8b47-c2e3f20d424c.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1775.976558] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6b9da3d-7fb5-4241-be4f-23cef10846e8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1775.996725] env[63024]: DEBUG 
oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Waiting for the task: (returnval){ [ 1775.996725] env[63024]: value = "task-1950943" [ 1775.996725] env[63024]: _type = "Task" [ 1775.996725] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.006400] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "refresh_cache-2dd20650-9273-432a-be28-73ccb66c721d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.006554] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "refresh_cache-2dd20650-9273-432a-be28-73ccb66c721d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1776.006701] env[63024]: DEBUG nova.network.neutron [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1776.008104] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1950943, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.013665] env[63024]: DEBUG oslo_concurrency.lockutils [None req-92013dbb-7af0-41eb-8854-04aa9e7626ff tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.706s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1776.027755] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "refresh_cache-e2086b87-ae9c-4968-a847-ac91e5345ec8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.027849] env[63024]: DEBUG nova.compute.manager [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=63024) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 1776.028023] env[63024]: DEBUG nova.compute.manager [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1776.028276] env[63024]: DEBUG nova.network.neutron [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1776.041698] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950940, 'name': PowerOffVM_Task, 'duration_secs': 0.219471} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.042568] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1776.043348] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3530628d-a279-46b4-a437-8c9897776537 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.066519] env[63024]: DEBUG nova.network.neutron [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1776.070738] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549fbe60-55b8-4478-99ef-308d73654a79 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.105801] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1776.106105] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e31ecfc-143b-4289-99dd-169f00a77d62 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.116056] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1776.116056] env[63024]: value = "task-1950944" [ 1776.116056] env[63024]: _type = "Task" [ 1776.116056] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.126629] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1776.126902] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1776.127166] env[63024]: DEBUG oslo_concurrency.lockutils [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.127310] env[63024]: DEBUG oslo_concurrency.lockutils [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1776.127481] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1776.127719] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0dbcfc4c-e4de-4c7b-9b95-f8c2f616adb4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.135978] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1776.136177] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1776.136933] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49126529-d716-432c-b761-abdcc6277ac8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.145306] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1776.145306] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b53da5-bfc3-218e-cef3-07b6520a9d39" [ 1776.145306] env[63024]: _type = "Task" [ 1776.145306] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.153301] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b53da5-bfc3-218e-cef3-07b6520a9d39, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.190924] env[63024]: DEBUG oslo_concurrency.lockutils [req-3f1d751b-0abd-4d66-b879-f830f5074917 req-746c61be-f9d5-424f-8b3d-bcaf6d2fbf64 service nova] Releasing lock "refresh_cache-601a003d-811c-4698-b0b6-054482d32c21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1776.267521] env[63024]: DEBUG oslo_vmware.api [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Task: {'id': task-1950942, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151774} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.270116] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1776.270317] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1776.270487] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1776.270736] env[63024]: INFO nova.compute.manager [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Took 1.14 seconds to destroy the instance on the hypervisor. 
[ 1776.271025] env[63024]: DEBUG oslo.service.loopingcall [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1776.271404] env[63024]: DEBUG nova.compute.manager [-] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1776.271501] env[63024]: DEBUG nova.network.neutron [-] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1776.429760] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92035c9b-44e1-4f15-a397-bf29f1017469 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.438722] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c1e3a7-846d-450f-9acd-9042bc1bab9e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.473201] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b681ebad-af97-4477-a30b-7bc4eb4fe530 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.483588] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a04a58-d495-478c-bbbd-592887701384 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.498858] env[63024]: DEBUG nova.compute.provider_tree [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1776.514528] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1950943, 'name': ReconfigVM_Task, 'duration_secs': 0.412923} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.515164] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Reconfigured VM instance instance-00000032 to attach disk [datastore1] volume-51f1feea-0afc-4d43-8b47-c2e3f20d424c/volume-51f1feea-0afc-4d43-8b47-c2e3f20d424c.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1776.519987] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6892465d-aed4-4ba1-a40b-3b4c236fc71d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.535860] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Waiting for the task: (returnval){ [ 1776.535860] env[63024]: value = "task-1950945" [ 1776.535860] env[63024]: _type = "Task" [ 1776.535860] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.546930] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1950945, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.562568] env[63024]: DEBUG nova.network.neutron [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1776.574483] env[63024]: DEBUG nova.network.neutron [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1776.630827] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1776.631033] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1776.631210] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1776.656386] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b53da5-bfc3-218e-cef3-07b6520a9d39, 'name': SearchDatastore_Task, 'duration_secs': 0.028877} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1776.658749] env[63024]: DEBUG nova.compute.manager [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Received event network-vif-plugged-ca53b77e-33c4-4a60-b3aa-bc92763eb98e {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1776.658949] env[63024]: DEBUG oslo_concurrency.lockutils [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] Acquiring lock "2dd20650-9273-432a-be28-73ccb66c721d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1776.659161] env[63024]: DEBUG oslo_concurrency.lockutils [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] Lock "2dd20650-9273-432a-be28-73ccb66c721d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1776.659333] env[63024]: DEBUG oslo_concurrency.lockutils [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] Lock "2dd20650-9273-432a-be28-73ccb66c721d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1776.659474] env[63024]: DEBUG nova.compute.manager [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] No waiting events found dispatching network-vif-plugged-ca53b77e-33c4-4a60-b3aa-bc92763eb98e {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1776.659633] env[63024]: WARNING nova.compute.manager [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Received unexpected event network-vif-plugged-ca53b77e-33c4-4a60-b3aa-bc92763eb98e for instance with vm_state building and task_state spawning. [ 1776.659788] env[63024]: DEBUG nova.compute.manager [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Received event network-changed-ca53b77e-33c4-4a60-b3aa-bc92763eb98e {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1776.659936] env[63024]: DEBUG nova.compute.manager [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Refreshing instance network info cache due to event network-changed-ca53b77e-33c4-4a60-b3aa-bc92763eb98e. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1776.660111] env[63024]: DEBUG oslo_concurrency.lockutils [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] Acquiring lock "refresh_cache-2dd20650-9273-432a-be28-73ccb66c721d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1776.660299] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5012239-70d2-4214-82c5-7b6ba3a4f991 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1776.667331] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1776.667331] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5222c356-abce-e921-c18b-7a23834f1638" [ 1776.667331] env[63024]: _type = "Task" [ 1776.667331] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1776.677326] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5222c356-abce-e921-c18b-7a23834f1638, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1776.798505] env[63024]: DEBUG nova.network.neutron [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Updating instance_info_cache with network_info: [{"id": "ca53b77e-33c4-4a60-b3aa-bc92763eb98e", "address": "fa:16:3e:17:8e:e6", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca53b77e-33", "ovs_interfaceid": "ca53b77e-33c4-4a60-b3aa-bc92763eb98e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.007626] env[63024]: DEBUG nova.scheduler.client.report [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Inventory has not changed for provider 
89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1777.046852] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1950945, 'name': ReconfigVM_Task, 'duration_secs': 0.175524} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.047167] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402080', 'volume_id': '51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'name': 'volume-51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '601a003d-811c-4698-b0b6-054482d32c21', 'attached_at': '', 'detached_at': '', 'volume_id': '51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'serial': '51f1feea-0afc-4d43-8b47-c2e3f20d424c'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1777.047689] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-779d5420-05bd-4f7e-b999-fe28a31ea222 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.054496] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Waiting for the task: (returnval){ [ 1777.054496] env[63024]: value = "task-1950946" [ 1777.054496] env[63024]: _type = "Task" [ 1777.054496] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.063289] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1950946, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.076597] env[63024]: INFO nova.compute.manager [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e2086b87-ae9c-4968-a847-ac91e5345ec8] Took 1.05 seconds to deallocate network for instance. 
[ 1777.132715] env[63024]: DEBUG nova.network.neutron [-] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1777.178057] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5222c356-abce-e921-c18b-7a23834f1638, 'name': SearchDatastore_Task, 'duration_secs': 0.011602} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.178519] env[63024]: DEBUG oslo_concurrency.lockutils [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.178879] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 31a693b6-293a-4f01-9baf-a9e7e8d453d4/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk. {{(pid=63024) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1777.179941] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0b24894-60f2-4035-8063-8a97bdfae321 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.189930] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1777.189930] env[63024]: value = "task-1950947" [ 1777.189930] env[63024]: _type = "Task" [ 1777.189930] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.196489] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950947, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.301957] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "refresh_cache-2dd20650-9273-432a-be28-73ccb66c721d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1777.301957] env[63024]: DEBUG nova.compute.manager [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Instance network_info: |[{"id": "ca53b77e-33c4-4a60-b3aa-bc92763eb98e", "address": "fa:16:3e:17:8e:e6", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca53b77e-33", "ovs_interfaceid": "ca53b77e-33c4-4a60-b3aa-bc92763eb98e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1777.303951] env[63024]: DEBUG oslo_concurrency.lockutils [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] Acquired lock "refresh_cache-2dd20650-9273-432a-be28-73ccb66c721d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1777.303951] env[63024]: DEBUG nova.network.neutron [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Refreshing network info cache for port ca53b77e-33c4-4a60-b3aa-bc92763eb98e {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1777.304223] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:8e:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca53b77e-33c4-4a60-b3aa-bc92763eb98e', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1777.317441] env[63024]: DEBUG oslo.service.loopingcall [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 
tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1777.321102] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1777.322205] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc4f473c-bafa-4551-a0da-243f15805cd0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.346681] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1777.346681] env[63024]: value = "task-1950948" [ 1777.346681] env[63024]: _type = "Task" [ 1777.346681] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.359857] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950948, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.512999] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.699s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1777.513606] env[63024]: DEBUG nova.compute.manager [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1777.516389] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 42.937s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.570474] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1950946, 'name': Rename_Task, 'duration_secs': 0.124234} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.570788] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1777.577128] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-664512a6-adda-4e2a-bb55-ab23e1e4d929 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.585720] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Waiting for the task: (returnval){ [ 1777.585720] env[63024]: value = "task-1950949" [ 1777.585720] env[63024]: _type = "Task" [ 1777.585720] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.598127] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1950949, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.636362] env[63024]: INFO nova.compute.manager [-] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Took 1.36 seconds to deallocate network for instance. [ 1777.704374] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950947, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.465527} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1777.704374] env[63024]: INFO nova.virt.vmwareapi.ds_util [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 31a693b6-293a-4f01-9baf-a9e7e8d453d4/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk. 
[ 1777.704374] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d133b11-3183-48db-8f94-68e1fc218e47 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.729250] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 31a693b6-293a-4f01-9baf-a9e7e8d453d4/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1777.731865] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d8c4a23-6884-42fd-ba32-02c6d970bfea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1777.751410] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1777.751410] env[63024]: value = "task-1950950" [ 1777.751410] env[63024]: _type = "Task" [ 1777.751410] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1777.755215] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1777.755215] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1777.755215] env[63024]: DEBUG nova.network.neutron [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1777.761970] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950950, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.858569] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1777.858810] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1777.858965] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950948, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1777.989060] env[63024]: DEBUG nova.network.neutron [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Updated VIF entry in instance network info cache for port ca53b77e-33c4-4a60-b3aa-bc92763eb98e. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1777.989506] env[63024]: DEBUG nova.network.neutron [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Updating instance_info_cache with network_info: [{"id": "ca53b77e-33c4-4a60-b3aa-bc92763eb98e", "address": "fa:16:3e:17:8e:e6", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapca53b77e-33", "ovs_interfaceid": "ca53b77e-33c4-4a60-b3aa-bc92763eb98e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1778.034042] env[63024]: DEBUG nova.compute.utils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1778.035229] env[63024]: DEBUG nova.compute.manager [None 
req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1778.035404] env[63024]: DEBUG nova.network.neutron [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1778.101662] env[63024]: DEBUG oslo_vmware.api [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1950949, 'name': PowerOnVM_Task, 'duration_secs': 0.511685} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.109184] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320b9bf-cba8-4b69-a02f-ec6140d94dd3 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "e2086b87-ae9c-4968-a847-ac91e5345ec8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.026s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1778.109184] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1778.109184] env[63024]: INFO nova.compute.manager [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Took 4.94 seconds to spawn the instance on the hypervisor. 
[ 1778.109184] env[63024]: DEBUG nova.compute.manager [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1778.109184] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d3ee0e-beec-47c6-8d37-7d91cbbb1682 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.120789] env[63024]: DEBUG nova.policy [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1041b5bacb5b4df89ec6da6d9400089e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '45d36e5894294d9b875bb0c69c7c2a7c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1778.142215] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1778.266893] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950950, 'name': ReconfigVM_Task, 'duration_secs': 0.438295} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.267367] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 31a693b6-293a-4f01-9baf-a9e7e8d453d4/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1778.268270] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d7297b-1a78-4d51-aa6b-46474aafe9d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.297299] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2e0a8a5d-c305-43fe-91f6-411263659901 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.316717] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1778.316717] env[63024]: value = "task-1950951" [ 1778.316717] env[63024]: _type = "Task" [ 1778.316717] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.325419] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950951, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.359504] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950948, 'name': CreateVM_Task, 'duration_secs': 0.532966} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.359692] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1778.360707] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1778.360898] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1778.361231] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1778.361493] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52bfa277-a0a1-4a02-a736-86135469e08d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.366860] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1778.366860] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c381a6-7b5d-da68-1d4e-99d4ae7340b1" [ 1778.366860] env[63024]: _type = "Task" [ 1778.366860] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.375512] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c381a6-7b5d-da68-1d4e-99d4ae7340b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.492506] env[63024]: DEBUG oslo_concurrency.lockutils [req-9c4d0f01-b3ca-4a41-a959-e855d390d990 req-5fd46b2c-0226-4472-b44b-56a87f34059a service nova] Releasing lock "refresh_cache-2dd20650-9273-432a-be28-73ccb66c721d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1778.538976] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Applying migration context for instance 650a97b9-911e-44b0-9e82-a6d4cc95c9dd as it has an incoming, in-progress migration cb60c03c-4816-43a9-a522-d9e511f6ee40. 
Migration status is post-migrating {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1778.541118] env[63024]: INFO nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating resource usage from migration cb60c03c-4816-43a9-a522-d9e511f6ee40 [ 1778.543432] env[63024]: DEBUG nova.compute.manager [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1778.567510] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b765b8b3-a099-4e23-be30-d1178ecffc37 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1778.567903] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.568208] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9cf45c3a-2a74-4f8e-8817-47bbd748a44b is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1778.568517] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.568807] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b7f26f0e-d5a9-42a6-8af2-065659f89cf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.569124] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e8ad74ce-7862-4574-98e7-14bc54bd5d6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.569421] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 18444b47-476a-4ca3-9a4f-0dc58e652143 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 1778.569717] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9679a1a2-b003-4a60-a812-8b3a9b5f545f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1778.570034] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 8edc24d6-9073-4836-b14b-422df3ac1b88 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1778.570306] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance df2933d1-32c3-48a6-8ceb-d5e3047d0b78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.570598] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 3815d381-760d-40fc-98cf-8e6af287007f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1778.570883] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 37792b57-3347-4134-a060-53359afa3298 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.571221] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance c1fd4146-6dd3-49e9-a744-466e6168e158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.571501] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 82b7019c-5049-4b8b-abb4-46f326ce3d5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.571802] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b588ea21-dea0-4ee6-8f9e-12007d0a1ce1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.572069] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 31a693b6-293a-4f01-9baf-a9e7e8d453d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.572296] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 5c2efe96-4ac4-4693-9203-43407d768f66 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.572515] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 601a003d-811c-4698-b0b6-054482d32c21 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.572726] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 2dd20650-9273-432a-be28-73ccb66c721d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.572950] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Migration cb60c03c-4816-43a9-a522-d9e511f6ee40 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1778.573315] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 650a97b9-911e-44b0-9e82-a6d4cc95c9dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.573550] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 839776ef-0562-424d-b301-2aa896f32e14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1778.612333] env[63024]: DEBUG nova.compute.manager [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1778.632452] env[63024]: INFO nova.compute.manager [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Took 51.96 seconds to build instance. [ 1778.690573] env[63024]: DEBUG nova.compute.manager [req-27caea3a-a663-4f84-98d5-be1bcc16de5e req-604b06fe-109e-4b27-9f18-192cc751901b service nova] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Received event network-vif-deleted-cc0bbdd4-4d81-4cfa-a689-4d95dfdf81d5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1778.751278] env[63024]: DEBUG nova.network.neutron [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance_info_cache with network_info: [{"id": "241606ef-afe1-4ca8-912c-dae7639e4941", "address": "fa:16:3e:fc:f3:01", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241606ef-af", "ovs_interfaceid": "241606ef-afe1-4ca8-912c-dae7639e4941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1778.779035] env[63024]: DEBUG nova.network.neutron [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Successfully created port: c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1778.828220] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950951, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1778.878238] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c381a6-7b5d-da68-1d4e-99d4ae7340b1, 'name': SearchDatastore_Task, 'duration_secs': 0.041557} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.880033] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1778.880033] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1778.880033] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1778.880033] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1778.880033] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1778.880033] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36ddcdee-5a3f-4133-8ac5-fc65b56d0bd1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.888026] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1778.888026] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1778.888205] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84dc3166-9254-4da2-9af7-4225069f101c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.892912] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1778.892912] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524928de-d3d2-1a9c-96f0-b7027c0608a4" [ 1778.892912] env[63024]: _type = "Task" [ 1778.892912] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.905048] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524928de-d3d2-1a9c-96f0-b7027c0608a4, 'name': SearchDatastore_Task, 'duration_secs': 0.007781} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1778.905930] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c42b0c43-2544-47cb-b615-daa2a12d89cf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1778.913351] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1778.913351] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5210c9ca-e2cc-703e-43e2-2e1e13ab2adf" [ 1778.913351] env[63024]: _type = "Task" [ 1778.913351] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1778.921082] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5210c9ca-e2cc-703e-43e2-2e1e13ab2adf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.082381] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 02db92ec-3377-406b-a95c-0022579fa75b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1779.134479] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1779.135066] env[63024]: DEBUG oslo_concurrency.lockutils [None req-452b675b-0764-4cf1-bd5a-9af70de07262 tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Lock "601a003d-811c-4698-b0b6-054482d32c21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.200s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1779.255898] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.329481] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950951, 'name': ReconfigVM_Task, 'duration_secs': 0.601081} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.329864] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1779.330156] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d31dd5c9-b8e3-4a78-9d99-2e816cd5f3bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.337288] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1779.337288] env[63024]: value = "task-1950952" [ 1779.337288] env[63024]: _type = "Task" [ 1779.337288] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.348200] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950952, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.423803] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5210c9ca-e2cc-703e-43e2-2e1e13ab2adf, 'name': SearchDatastore_Task, 'duration_secs': 0.007891} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1779.424087] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1779.424343] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 2dd20650-9273-432a-be28-73ccb66c721d/2dd20650-9273-432a-be28-73ccb66c721d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1779.424599] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6a1e0022-4914-4677-b9a2-ba985bb9eb21 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.431179] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1779.431179] env[63024]: value = "task-1950953" [ 1779.431179] env[63024]: _type = "Task" [ 1779.431179] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1779.439195] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950953, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.554871] env[63024]: DEBUG nova.compute.manager [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1779.581982] env[63024]: DEBUG nova.virt.hardware [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1779.582361] env[63024]: DEBUG nova.virt.hardware [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1779.582629] env[63024]: DEBUG nova.virt.hardware [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1779.583026] env[63024]: DEBUG nova.virt.hardware [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1779.583296] env[63024]: DEBUG nova.virt.hardware [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1779.583544] env[63024]: DEBUG nova.virt.hardware [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1779.583868] env[63024]: DEBUG nova.virt.hardware [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1779.584124] env[63024]: DEBUG nova.virt.hardware [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1779.584430] env[63024]: DEBUG nova.virt.hardware [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1779.584683] env[63024]: DEBUG nova.virt.hardware [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1779.584924] env[63024]: DEBUG nova.virt.hardware [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1779.589026] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 6156ce17-3f29-487a-afc5-2fa0fb7f114c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1779.589026] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4875153a-20fe-4d49-95e7-c4ad4a1e0076 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.597802] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb273b4-dd09-4df0-83d3-25c3d5f9fb11 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.638025] env[63024]: DEBUG nova.compute.manager [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1779.666993] env[63024]: DEBUG nova.compute.manager [req-084fb487-4e3c-42c6-bf74-7d2634160d1d req-e4485aa8-110e-4fd2-8b66-efb1408e523b service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Received event network-changed-2c1caf53-f9b6-4184-b807-b496dcae4cbb {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1779.668271] env[63024]: DEBUG nova.compute.manager [req-084fb487-4e3c-42c6-bf74-7d2634160d1d req-e4485aa8-110e-4fd2-8b66-efb1408e523b service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Refreshing instance network info cache due to event network-changed-2c1caf53-f9b6-4184-b807-b496dcae4cbb. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1779.668271] env[63024]: DEBUG oslo_concurrency.lockutils [req-084fb487-4e3c-42c6-bf74-7d2634160d1d req-e4485aa8-110e-4fd2-8b66-efb1408e523b service nova] Acquiring lock "refresh_cache-601a003d-811c-4698-b0b6-054482d32c21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1779.668271] env[63024]: DEBUG oslo_concurrency.lockutils [req-084fb487-4e3c-42c6-bf74-7d2634160d1d req-e4485aa8-110e-4fd2-8b66-efb1408e523b service nova] Acquired lock "refresh_cache-601a003d-811c-4698-b0b6-054482d32c21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1779.668271] env[63024]: DEBUG nova.network.neutron [req-084fb487-4e3c-42c6-bf74-7d2634160d1d req-e4485aa8-110e-4fd2-8b66-efb1408e523b service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Refreshing network info cache for port 2c1caf53-f9b6-4184-b807-b496dcae4cbb {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1779.784826] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-910a94fc-d74c-42a8-b605-082e4a0360f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.808398] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a5fa52-5f0d-4e36-bbb7-d1c24c1536b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1779.817444] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance '650a97b9-911e-44b0-9e82-a6d4cc95c9dd' progress to 83 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1779.848174] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950952, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1779.942303] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950953, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.094075] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9267e5e4-732d-47f1-8a30-d926a1269fb9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1780.162698] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.324364] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1780.324668] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e87e3d32-ffda-488d-9995-2e74faab28b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.332581] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1780.332581] env[63024]: value = "task-1950954" [ 1780.332581] env[63024]: _type = "Task" [ 1780.332581] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.345195] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950954, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.350302] env[63024]: DEBUG oslo_vmware.api [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950952, 'name': PowerOnVM_Task, 'duration_secs': 0.579843} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.350951] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1780.354169] env[63024]: DEBUG nova.compute.manager [None req-41ccd733-089f-4759-b077-3bbe6893974d tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1780.355155] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baeaf71c-9f4c-44e7-9826-05a1f93117a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.441826] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950953, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.546777} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.442102] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 2dd20650-9273-432a-be28-73ccb66c721d/2dd20650-9273-432a-be28-73ccb66c721d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1780.442312] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1780.442733] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3219fb46-3d17-4fa1-82aa-a4ca5faec089 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.450168] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1780.450168] env[63024]: value = "task-1950955" [ 1780.450168] env[63024]: _type = "Task" [ 1780.450168] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1780.462205] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950955, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.596400] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 1709d916-d0c4-4706-b41b-8b0ed25f3331 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1780.714990] env[63024]: DEBUG nova.network.neutron [req-084fb487-4e3c-42c6-bf74-7d2634160d1d req-e4485aa8-110e-4fd2-8b66-efb1408e523b service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Updated VIF entry in instance network info cache for port 2c1caf53-f9b6-4184-b807-b496dcae4cbb. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1780.715396] env[63024]: DEBUG nova.network.neutron [req-084fb487-4e3c-42c6-bf74-7d2634160d1d req-e4485aa8-110e-4fd2-8b66-efb1408e523b service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Updating instance_info_cache with network_info: [{"id": "2c1caf53-f9b6-4184-b807-b496dcae4cbb", "address": "fa:16:3e:9c:2b:a9", "network": {"id": "6b6b3f2c-8c90-447a-9e4d-a4aff0ad5260", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1251599304-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6a85ae55ce0e4f829944ed37204fc98a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3a80436-f7a9-431a-acec-aca3d76e3f9b", "external-id": "cl2-zone-339", "segmentation_id": 339, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2c1caf53-f9", "ovs_interfaceid": "2c1caf53-f9b6-4184-b807-b496dcae4cbb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1780.846470] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950954, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1780.847473] env[63024]: DEBUG nova.network.neutron [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Successfully updated port: c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1780.939328] env[63024]: DEBUG nova.compute.manager [req-37e7c2ce-bb21-4c51-a625-55b98569fbc0 req-5b4bf3d5-5825-4b1a-80eb-c8ed9f91c741 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Received event network-vif-plugged-c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1780.939549] env[63024]: DEBUG oslo_concurrency.lockutils [req-37e7c2ce-bb21-4c51-a625-55b98569fbc0 req-5b4bf3d5-5825-4b1a-80eb-c8ed9f91c741 service nova] Acquiring lock "839776ef-0562-424d-b301-2aa896f32e14-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1780.939756] env[63024]: DEBUG oslo_concurrency.lockutils [req-37e7c2ce-bb21-4c51-a625-55b98569fbc0 req-5b4bf3d5-5825-4b1a-80eb-c8ed9f91c741 service nova] Lock "839776ef-0562-424d-b301-2aa896f32e14-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1780.939919] env[63024]: DEBUG oslo_concurrency.lockutils [req-37e7c2ce-bb21-4c51-a625-55b98569fbc0 req-5b4bf3d5-5825-4b1a-80eb-c8ed9f91c741 service nova] Lock "839776ef-0562-424d-b301-2aa896f32e14-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1780.940148] env[63024]: DEBUG nova.compute.manager [req-37e7c2ce-bb21-4c51-a625-55b98569fbc0 req-5b4bf3d5-5825-4b1a-80eb-c8ed9f91c741 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] No waiting events found dispatching network-vif-plugged-c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1780.940321] env[63024]: WARNING nova.compute.manager [req-37e7c2ce-bb21-4c51-a625-55b98569fbc0 req-5b4bf3d5-5825-4b1a-80eb-c8ed9f91c741 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Received unexpected event network-vif-plugged-c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 for instance with vm_state building and task_state spawning. [ 1780.961896] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950955, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.214584} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1780.962182] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1780.963056] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5913d7e9-7c3d-454a-ad6f-daea2bd0a383 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1780.987449] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 2dd20650-9273-432a-be28-73ccb66c721d/2dd20650-9273-432a-be28-73ccb66c721d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1780.987764] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-703dc1a9-e397-4d9b-b734-56236ed81887 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1781.010979] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1781.010979] env[63024]: value = "task-1950956" [ 1781.010979] env[63024]: _type = "Task" [ 1781.010979] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1781.022912] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950956, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.101070] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1781.218389] env[63024]: DEBUG oslo_concurrency.lockutils [req-084fb487-4e3c-42c6-bf74-7d2634160d1d req-e4485aa8-110e-4fd2-8b66-efb1408e523b service nova] Releasing lock "refresh_cache-601a003d-811c-4698-b0b6-054482d32c21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1781.345307] env[63024]: DEBUG oslo_vmware.api [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1950954, 'name': PowerOnVM_Task, 'duration_secs': 0.696375} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1781.345767] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1781.345767] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f11347cd-0e1e-4f1d-bb8b-339a2b53cfc6 tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance '650a97b9-911e-44b0-9e82-a6d4cc95c9dd' progress to 100 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1781.350358] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquiring lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1781.350510] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquired lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1781.350691] env[63024]: DEBUG nova.network.neutron [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1781.522189] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950956, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1781.604486] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 81f96b5a-b878-4e6c-9683-00528a4d5650 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1781.888320] env[63024]: DEBUG nova.network.neutron [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1782.025709] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950956, 'name': ReconfigVM_Task, 'duration_secs': 0.621236} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.026059] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 2dd20650-9273-432a-be28-73ccb66c721d/2dd20650-9273-432a-be28-73ccb66c721d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1782.026761] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a1e7771a-c310-4365-a253-fc770188c7b1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.033248] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1782.033248] env[63024]: value = "task-1950957" [ 1782.033248] env[63024]: _type = "Task" [ 1782.033248] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.043138] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950957, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.107589] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance ac60546a-37b2-4d2a-8505-61fe202e2ed0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1782.173376] env[63024]: DEBUG nova.network.neutron [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updating instance_info_cache with network_info: [{"id": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "address": "fa:16:3e:30:da:7e", "network": {"id": "c6d5dbfd-7cfb-4a2d-a8dd-088f4c0b9461", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085991502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45d36e5894294d9b875bb0c69c7c2a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c1b0a3-d3", "ovs_interfaceid": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1782.544031] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950957, 'name': Rename_Task, 'duration_secs': 0.176566} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1782.544322] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1782.544556] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b080d38a-3d9d-4e40-aefd-7b93f67d741d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.550982] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1782.550982] env[63024]: value = "task-1950958" [ 1782.550982] env[63024]: _type = "Task" [ 1782.550982] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.558950] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950958, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1782.610574] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 7cf0ac90-d87d-4644-8a88-da5328d1721d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1782.677425] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Releasing lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1782.677425] env[63024]: DEBUG nova.compute.manager [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Instance network_info: |[{"id": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "address": "fa:16:3e:30:da:7e", "network": {"id": "c6d5dbfd-7cfb-4a2d-a8dd-088f4c0b9461", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085991502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45d36e5894294d9b875bb0c69c7c2a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c1b0a3-d3", "ovs_interfaceid": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1782.677868] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:da:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'db68bd64-5b56-49af-a075-13dcf85cb2e0', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c8c1b0a3-d31e-4600-b1be-f31f6b4b4071', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1782.686309] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Creating folder: Project (45d36e5894294d9b875bb0c69c7c2a7c). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1782.686585] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3402bc16-3931-4e9d-8c15-6be915b275bd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.697685] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Created folder: Project (45d36e5894294d9b875bb0c69c7c2a7c) in parent group-v401959. [ 1782.697881] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Creating folder: Instances. Parent ref: group-v402117. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1782.698193] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e29df9a9-a410-4c78-a003-636341684185 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.707315] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Created folder: Instances in parent group-v402117. [ 1782.707542] env[63024]: DEBUG oslo.service.loopingcall [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1782.707726] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1782.707922] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6003fa03-c2b7-46e1-bd24-0fea87807413 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1782.727662] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1782.727662] env[63024]: value = "task-1950961" [ 1782.727662] env[63024]: _type = "Task" [ 1782.727662] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1782.736046] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950961, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.002427] env[63024]: DEBUG nova.compute.manager [req-badb9886-6227-454a-b5f5-3784750a2631 req-b90e7e8a-82ba-4164-acf3-8843ce779e59 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Received event network-changed-c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1783.002427] env[63024]: DEBUG nova.compute.manager [req-badb9886-6227-454a-b5f5-3784750a2631 req-b90e7e8a-82ba-4164-acf3-8843ce779e59 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Refreshing instance network info cache due to event network-changed-c8c1b0a3-d31e-4600-b1be-f31f6b4b4071. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1783.002565] env[63024]: DEBUG oslo_concurrency.lockutils [req-badb9886-6227-454a-b5f5-3784750a2631 req-b90e7e8a-82ba-4164-acf3-8843ce779e59 service nova] Acquiring lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.002728] env[63024]: DEBUG oslo_concurrency.lockutils [req-badb9886-6227-454a-b5f5-3784750a2631 req-b90e7e8a-82ba-4164-acf3-8843ce779e59 service nova] Acquired lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.002887] env[63024]: DEBUG nova.network.neutron [req-badb9886-6227-454a-b5f5-3784750a2631 req-b90e7e8a-82ba-4164-acf3-8843ce779e59 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Refreshing network info cache for port c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1783.064253] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950958, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.113271] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 92d1f96e-bbe7-4654-9d3a-47ba40057157 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1783.238672] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950961, 'name': CreateVM_Task, 'duration_secs': 0.340442} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.238870] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1783.239717] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.239803] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.240115] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1783.240375] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2293c3f8-50fb-471f-bac1-bb24e36fd05f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.245078] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for the task: (returnval){ [ 1783.245078] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d6ca07-181c-badc-a17f-0213081c7b95" [ 1783.245078] env[63024]: _type = "Task" [ 1783.245078] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.252845] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d6ca07-181c-badc-a17f-0213081c7b95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.562059] env[63024]: DEBUG oslo_vmware.api [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950958, 'name': PowerOnVM_Task, 'duration_secs': 0.611575} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.562427] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1783.562552] env[63024]: INFO nova.compute.manager [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Took 9.40 seconds to spawn the instance on the hypervisor. [ 1783.562671] env[63024]: DEBUG nova.compute.manager [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1783.563448] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674d6df5-cf4c-4e03-8fed-5462a3611bea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.573581] env[63024]: INFO nova.compute.manager [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Rescuing [ 1783.573858] env[63024]: DEBUG oslo_concurrency.lockutils [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "refresh_cache-b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.573960] env[63024]: DEBUG oslo_concurrency.lockutils [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "refresh_cache-b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.574133] env[63024]: DEBUG nova.network.neutron [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1783.617975] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance c12774e4-77d1-4001-8d5d-0240dfed4ead has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1783.756262] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d6ca07-181c-badc-a17f-0213081c7b95, 'name': SearchDatastore_Task, 'duration_secs': 0.01157} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1783.756560] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1783.756794] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1783.757031] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1783.757177] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1783.757352] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1783.757605] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d48c3db8-8da1-422f-8087-b78eb6553e9f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.768181] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1783.768360] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] 
Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1783.769451] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad02a3b7-a286-4871-ab64-858b70f5704c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1783.775177] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for the task: (returnval){ [ 1783.775177] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529212d6-4b73-cb37-c121-b9da5024335c" [ 1783.775177] env[63024]: _type = "Task" [ 1783.775177] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1783.782241] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529212d6-4b73-cb37-c121-b9da5024335c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1783.788629] env[63024]: DEBUG nova.network.neutron [req-badb9886-6227-454a-b5f5-3784750a2631 req-b90e7e8a-82ba-4164-acf3-8843ce779e59 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updated VIF entry in instance network info cache for port c8c1b0a3-d31e-4600-b1be-f31f6b4b4071. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1783.788998] env[63024]: DEBUG nova.network.neutron [req-badb9886-6227-454a-b5f5-3784750a2631 req-b90e7e8a-82ba-4164-acf3-8843ce779e59 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updating instance_info_cache with network_info: [{"id": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "address": "fa:16:3e:30:da:7e", "network": {"id": "c6d5dbfd-7cfb-4a2d-a8dd-088f4c0b9461", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085991502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45d36e5894294d9b875bb0c69c7c2a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c1b0a3-d3", "ovs_interfaceid": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.084430] env[63024]: INFO nova.compute.manager [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 
2dd20650-9273-432a-be28-73ccb66c721d] Took 56.08 seconds to build instance. [ 1784.120700] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9bf1316e-f1ae-426e-a0a2-d814a2460c4d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1784.285927] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529212d6-4b73-cb37-c121-b9da5024335c, 'name': SearchDatastore_Task, 'duration_secs': 0.020926} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.286741] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-474f34d5-bbdf-4236-b0f7-8e5d40c44ca8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.292284] env[63024]: DEBUG oslo_concurrency.lockutils [req-badb9886-6227-454a-b5f5-3784750a2631 req-b90e7e8a-82ba-4164-acf3-8843ce779e59 service nova] Releasing lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.292768] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for the task: (returnval){ [ 1784.292768] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524193fc-5f21-a44d-3610-985def64e5d8" [ 1784.292768] env[63024]: _type = "Task" [ 1784.292768] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.301607] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524193fc-5f21-a44d-3610-985def64e5d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1784.454060] env[63024]: DEBUG nova.network.neutron [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Port 241606ef-afe1-4ca8-912c-dae7639e4941 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1784.454332] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1784.454479] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1784.454642] env[63024]: DEBUG nova.network.neutron [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1784.587105] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ca823356-9d87-421d-b9f6-ce94987b5044 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "2dd20650-9273-432a-be28-73ccb66c721d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.162s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1784.623947] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance fe6847e2-a742-4338-983f-698c13aaefde has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1784.669188] env[63024]: DEBUG nova.network.neutron [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Updating instance_info_cache with network_info: [{"id": "83b7b8fb-a30a-4852-889a-ec6b339cc100", "address": "fa:16:3e:cf:79:7d", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b7b8fb-a3", "ovs_interfaceid": "83b7b8fb-a30a-4852-889a-ec6b339cc100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1784.804585] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524193fc-5f21-a44d-3610-985def64e5d8, 'name': SearchDatastore_Task, 'duration_secs': 0.026837} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1784.804772] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1784.805141] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 839776ef-0562-424d-b301-2aa896f32e14/839776ef-0562-424d-b301-2aa896f32e14.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1784.805316] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28a503c3-3862-43f1-80e2-a70c98486306 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1784.811975] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for the task: (returnval){ [ 1784.811975] env[63024]: value = "task-1950962" [ 1784.811975] env[63024]: _type = "Task" [ 1784.811975] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1784.819471] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950962, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.090056] env[63024]: DEBUG nova.compute.manager [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1785.126518] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 1448c924-7c61-4c43-a4e7-5a6dd45375cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1785.171652] env[63024]: DEBUG oslo_concurrency.lockutils [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "refresh_cache-b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.312323] env[63024]: DEBUG nova.network.neutron [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance_info_cache with network_info: [{"id": "241606ef-afe1-4ca8-912c-dae7639e4941", "address": "fa:16:3e:fc:f3:01", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241606ef-af", "ovs_interfaceid": "241606ef-afe1-4ca8-912c-dae7639e4941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1785.322635] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950962, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456578} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.323533] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 839776ef-0562-424d-b301-2aa896f32e14/839776ef-0562-424d-b301-2aa896f32e14.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1785.323786] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1785.324251] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-16056a6f-136d-4390-8718-340de4f26c12 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.331155] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for the task: (returnval){ [ 1785.331155] env[63024]: value = "task-1950963" [ 1785.331155] env[63024]: _type = "Task" [ 1785.331155] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.339571] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950963, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1785.447606] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d6cf3c21-82f2-47c5-9082-9dff4ebccb32 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "2dd20650-9273-432a-be28-73ccb66c721d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.447982] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d6cf3c21-82f2-47c5-9082-9dff4ebccb32 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "2dd20650-9273-432a-be28-73ccb66c721d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1785.448185] env[63024]: DEBUG nova.compute.manager [None req-d6cf3c21-82f2-47c5-9082-9dff4ebccb32 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1785.449103] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd398566-1004-4c86-a216-f06417d784f3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.456186] env[63024]: DEBUG nova.compute.manager [None req-d6cf3c21-82f2-47c5-9082-9dff4ebccb32 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63024) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1785.456748] env[63024]: DEBUG nova.objects.instance [None req-d6cf3c21-82f2-47c5-9082-9dff4ebccb32 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lazy-loading 'flavor' on Instance uuid 2dd20650-9273-432a-be28-73ccb66c721d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1785.618376] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1785.630355] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 73db94b8-cfa8-4457-bccb-d4b780edbd93 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1785.818222] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1785.842697] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950963, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.16305} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1785.842697] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1785.843692] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ddbdc5e-f3f9-48f3-915a-740f99f46051 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.868071] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Reconfiguring VM instance instance-00000035 to attach disk [datastore1] 839776ef-0562-424d-b301-2aa896f32e14/839776ef-0562-424d-b301-2aa896f32e14.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1785.868187] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e4a69853-7bd6-421d-b911-c81574667a56 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1785.887845] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for the task: (returnval){ [ 1785.887845] env[63024]: value = "task-1950964" [ 1785.887845] env[63024]: _type = "Task" [ 1785.887845] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1785.896179] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950964, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.133865] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e0a37f54-14ca-4eea-a9b3-6e652ca1e48d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1786.134197] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1786.134342] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3712MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1786.214278] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1786.215034] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba791011-7226-453f-bc80-3d3c3b5e73c3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.222112] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1786.222112] env[63024]: value = "task-1950965" [ 1786.222112] env[63024]: _type = "Task" [ 1786.222112] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.233378] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950965, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.322436] env[63024]: DEBUG nova.compute.manager [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63024) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1786.322719] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1786.400252] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950964, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.463693] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6cf3c21-82f2-47c5-9082-9dff4ebccb32 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1786.464049] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25544eba-b0df-4814-b74f-2850c4eda737 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.473468] env[63024]: DEBUG oslo_vmware.api [None req-d6cf3c21-82f2-47c5-9082-9dff4ebccb32 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1786.473468] env[63024]: value = "task-1950966" [ 1786.473468] env[63024]: _type = "Task" [ 1786.473468] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.482600] env[63024]: DEBUG oslo_vmware.api [None req-d6cf3c21-82f2-47c5-9082-9dff4ebccb32 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950966, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.589052] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fd2886-8eae-478e-96ea-9407d4dd72b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.596271] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fc82979-0d39-4582-8d88-81747bb4e892 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.626099] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f38c82-4968-4e54-ad11-9e646545e0ca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.633690] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33c72688-8cbc-45fe-a431-4bb2a9add366 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.648050] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1786.735023] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950965, 'name': 
PowerOffVM_Task, 'duration_secs': 0.203891} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.735023] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1786.735023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a618030b-7952-4be0-8733-0a0986385dbb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.750863] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f8e239-fb71-478a-ab9b-29c6f5a98562 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.778021] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1786.778174] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c110e5f0-6d9f-4090-b5f9-05657c99084e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1786.785126] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1786.785126] env[63024]: value = "task-1950967" [ 1786.785126] env[63024]: _type = "Task" [ 1786.785126] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1786.793629] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950967, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.900373] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950964, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1786.982970] env[63024]: DEBUG oslo_vmware.api [None req-d6cf3c21-82f2-47c5-9082-9dff4ebccb32 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950966, 'name': PowerOffVM_Task, 'duration_secs': 0.499596} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1786.983403] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6cf3c21-82f2-47c5-9082-9dff4ebccb32 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1786.983542] env[63024]: DEBUG nova.compute.manager [None req-d6cf3c21-82f2-47c5-9082-9dff4ebccb32 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1786.984817] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27963ff-d93a-4283-a69c-64939edad2aa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.174472] env[63024]: ERROR nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [req-18be5ada-62a3-4171-ba81-20d53b015dc2] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-18be5ada-62a3-4171-ba81-20d53b015dc2"}]} [ 1787.190846] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1787.207626] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1787.207945] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1787.219367] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1787.238680] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1787.299047] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1787.299361] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1787.299697] env[63024]: DEBUG oslo_concurrency.lockutils [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1787.299906] env[63024]: DEBUG oslo_concurrency.lockutils [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1787.300170] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1787.300505] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-737d1c41-a2ee-4095-b133-48b3a4f11062 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.310718] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1787.310981] env[63024]: DEBUG nova.virt.vmwareapi.vmops 
[None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1787.312022] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4254013e-8267-4aed-ad92-8ba2ddb946ff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.318391] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1787.318391] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f1f745-4030-8a40-7342-7a08f5c3dd9c" [ 1787.318391] env[63024]: _type = "Task" [ 1787.318391] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.328721] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f1f745-4030-8a40-7342-7a08f5c3dd9c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.401842] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950964, 'name': ReconfigVM_Task, 'duration_secs': 1.019655} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.402207] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Reconfigured VM instance instance-00000035 to attach disk [datastore1] 839776ef-0562-424d-b301-2aa896f32e14/839776ef-0562-424d-b301-2aa896f32e14.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1787.402964] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b1bbda45-21a2-41e3-b1bc-d4150bbfb98c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.410307] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for the task: (returnval){ [ 1787.410307] env[63024]: value = "task-1950968" [ 1787.410307] env[63024]: _type = "Task" [ 1787.410307] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.424849] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950968, 'name': Rename_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.498782] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d6cf3c21-82f2-47c5-9082-9dff4ebccb32 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "2dd20650-9273-432a-be28-73ccb66c721d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.051s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1787.785837] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2962480d-07f6-409a-bb81-8b5b003a1bcc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.793648] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca149e5-38b4-4c39-b9d1-0db8f2d5355e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.825552] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd1d831-a406-47bd-95a1-5301517fe1d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.836565] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a82b0fc-e5fe-49dc-a634-cdc706835625 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.840119] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f1f745-4030-8a40-7342-7a08f5c3dd9c, 'name': SearchDatastore_Task, 'duration_secs': 0.012582} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.841119] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0a53fb0-416f-47aa-9c13-984c797c07e1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.850396] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1787.854081] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1787.854081] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526c3757-2492-02bc-1604-41a41dfcc0ad" [ 1787.854081] env[63024]: _type = "Task" [ 1787.854081] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.861057] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526c3757-2492-02bc-1604-41a41dfcc0ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1787.919180] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950968, 'name': Rename_Task, 'duration_secs': 0.269924} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1787.919405] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1787.919639] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa769b63-c77f-4c13-a5ab-9ca1fc2bfa5b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1787.925668] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for the task: (returnval){ [ 1787.925668] env[63024]: value = "task-1950969" [ 1787.925668] env[63024]: _type = "Task" [ 1787.925668] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1787.933122] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950969, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.142367] env[63024]: INFO nova.compute.manager [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Rebuilding instance [ 1788.180199] env[63024]: DEBUG nova.compute.manager [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1788.181240] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe58e2a-e468-4c0c-ba38-1145fe641c26 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.353165] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1788.369276] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526c3757-2492-02bc-1604-41a41dfcc0ad, 'name': SearchDatastore_Task, 'duration_secs': 0.010116} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.369648] env[63024]: DEBUG oslo_concurrency.lockutils [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1788.369935] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b588ea21-dea0-4ee6-8f9e-12007d0a1ce1/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk. 
{{(pid=63024) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1788.370248] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e76a305c-5217-4007-acf5-8471d5119ab2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.378846] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1788.378846] env[63024]: value = "task-1950970" [ 1788.378846] env[63024]: _type = "Task" [ 1788.378846] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1788.386829] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950970, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.435203] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950969, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.862941] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1788.863361] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 11.347s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.863537] env[63024]: DEBUG oslo_concurrency.lockutils [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 53.637s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1788.863759] env[63024]: DEBUG oslo_concurrency.lockutils [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1788.865927] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 52.675s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 
1788.867456] env[63024]: INFO nova.compute.claims [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1788.888671] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950970, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1788.893899] env[63024]: INFO nova.scheduler.client.report [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Deleted allocations for instance b765b8b3-a099-4e23-be30-d1178ecffc37 [ 1788.936773] env[63024]: DEBUG oslo_vmware.api [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1950969, 'name': PowerOnVM_Task, 'duration_secs': 0.530666} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1788.937033] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1788.937234] env[63024]: INFO nova.compute.manager [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Took 9.38 seconds to spawn the instance on the hypervisor. 
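
The 409 "placement.concurrent_update" error recorded earlier in this log, and the later provider-generation bump, reflect Placement's optimistic-concurrency scheme: every inventory PUT must carry the resource provider generation the caller last saw, and a stale generation is rejected so the client refreshes its view and retries. The following is a minimal sketch of that refresh-and-retry pattern against the Placement HTTP API; the endpoint URL, token handling, and MAX_RETRIES constant are illustrative assumptions, not values taken from this log, and this is not Nova's report client code.

import requests

PLACEMENT = "http://placement.example.test"                  # assumed endpoint
HEADERS = {"X-Auth-Token": "<token>",                        # assumed auth
           "OpenStack-API-Version": "placement 1.26"}
MAX_RETRIES = 3                                              # illustrative only

def put_inventory(rp_uuid, inventories):
    """PUT inventories, re-reading the provider generation on a 409 conflict."""
    for _ in range(MAX_RETRIES):
        # Fetch the provider's current generation.
        rp = requests.get(f"{PLACEMENT}/resource_providers/{rp_uuid}",
                          headers=HEADERS).json()
        payload = {"resource_provider_generation": rp["generation"],
                   "inventories": inventories}
        resp = requests.put(
            f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
            json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # placement.concurrent_update: another writer bumped the generation,
        # so loop, re-read it, and try again.
    raise RuntimeError("gave up after repeated generation conflicts")
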
[ 1788.937407] env[63024]: DEBUG nova.compute.manager [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1788.938146] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4196ca06-eef8-4ad5-bdbf-bef17379178b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.196931] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1789.196931] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60201df3-b2b5-403b-9f8d-f33c33896843 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.203492] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1789.203492] env[63024]: value = "task-1950971" [ 1789.203492] env[63024]: _type = "Task" [ 1789.203492] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.212365] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950971, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.395311] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950970, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.556046} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1789.395661] env[63024]: INFO nova.virt.vmwareapi.ds_util [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b588ea21-dea0-4ee6-8f9e-12007d0a1ce1/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk. 
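
The "Acquiring lock ... devstack-image-cache_base/....vmdk" / "Releasing lock" pairs and the "Waiting for the task ... to complete" polling seen around this rescue-disk copy follow two generic patterns: an oslo.concurrency named lock serializes access to the cached image, and the oslo.vmware session polls the vCenter task until it finishes. A minimal sketch of both is below; it assumes an already-created VMwareAPISession, and copy_disk_task() is a hypothetical callable standing in for the CopyVirtualDisk_Task invocation, not something present in this log.

from oslo_concurrency import lockutils

def copy_from_image_cache(session, cache_vmdk, dest_vmdk, copy_disk_task):
    """Serialize on the cached VMDK, start the copy, then block on the task.

    copy_disk_task is a hypothetical callable returning a vCenter task
    reference (e.g. a CopyVirtualDisk_Task call); it is assumed here.
    """
    # Same pattern as the "[datastore1] devstack-image-cache_base/..." lock
    # entries above: only one worker touches the cached image at a time.
    with lockutils.lock(cache_vmdk):
        task_ref = copy_disk_task(cache_vmdk, dest_vmdk)
        # wait_for_task() polls the task until completion, which is what
        # produces the "progress is N%" lines in this log.
        session.wait_for_task(task_ref)
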
[ 1789.396426] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad3d1854-0086-4e58-835c-ace01f26a74a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.425645] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] b588ea21-dea0-4ee6-8f9e-12007d0a1ce1/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1789.426296] env[63024]: DEBUG oslo_concurrency.lockutils [None req-33fa5d9a-e11c-4d20-84c9-2e051d492713 tempest-ListImageFiltersTestJSON-333418535 tempest-ListImageFiltersTestJSON-333418535-project-member] Lock "b765b8b3-a099-4e23-be30-d1178ecffc37" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 57.759s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.427480] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f7da39e-c388-4570-8d59-212b6ad04c6e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.455183] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1789.455183] env[63024]: value = "task-1950972" [ 1789.455183] env[63024]: _type = "Task" [ 1789.455183] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.457532] env[63024]: INFO nova.compute.manager [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Took 56.19 seconds to build instance. [ 1789.468381] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950972, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.714803] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1789.715153] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1789.716439] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4823dfb1-3030-4932-8923-4c0db76d7ab2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.723703] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1789.723922] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6884ec13-5856-42e7-bb2c-bedc1356c7af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.818691] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1789.818691] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1789.818864] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleting the datastore file [datastore1] 2dd20650-9273-432a-be28-73ccb66c721d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1789.819133] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11fd71e4-3297-433a-8df2-d0189ef6148d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1789.826748] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1789.826748] env[63024]: value = "task-1950974" [ 1789.826748] env[63024]: _type = "Task" [ 1789.826748] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1789.835021] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950974, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1789.962951] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6b8f28d-76bb-48e4-8351-7ae81a6c1847 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lock "839776ef-0562-424d-b301-2aa896f32e14" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.703s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1789.973727] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950972, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.025244] env[63024]: DEBUG nova.compute.manager [req-05182a11-3091-4ce5-814c-13cf12961588 req-e68d0f1f-d703-4f2f-a2b5-0f551351238b service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Received event network-changed-c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1790.025442] env[63024]: DEBUG nova.compute.manager [req-05182a11-3091-4ce5-814c-13cf12961588 req-e68d0f1f-d703-4f2f-a2b5-0f551351238b service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Refreshing instance network info cache due to event network-changed-c8c1b0a3-d31e-4600-b1be-f31f6b4b4071. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1790.025652] env[63024]: DEBUG oslo_concurrency.lockutils [req-05182a11-3091-4ce5-814c-13cf12961588 req-e68d0f1f-d703-4f2f-a2b5-0f551351238b service nova] Acquiring lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1790.025791] env[63024]: DEBUG oslo_concurrency.lockutils [req-05182a11-3091-4ce5-814c-13cf12961588 req-e68d0f1f-d703-4f2f-a2b5-0f551351238b service nova] Acquired lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1790.026708] env[63024]: DEBUG nova.network.neutron [req-05182a11-3091-4ce5-814c-13cf12961588 req-e68d0f1f-d703-4f2f-a2b5-0f551351238b service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Refreshing network info cache for port c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1790.336603] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950974, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178592} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.339356] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1790.339585] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1790.340037] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1790.397484] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cb16c86-9f57-4a0a-80b1-722ec1171b23 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.408328] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ec244a-6d00-45dc-a961-d4161a3a184d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.440431] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91c32773-aea7-4c0a-a9bc-b4cd57863dec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.450434] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11f6d1c3-e6ff-461c-8b10-3cb6c60eaa75 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.467246] env[63024]: DEBUG nova.compute.provider_tree [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1790.473544] env[63024]: DEBUG nova.compute.manager [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1790.482602] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950972, 'name': ReconfigVM_Task, 'duration_secs': 0.704374} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1790.483012] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Reconfigured VM instance instance-0000002f to attach disk [datastore1] b588ea21-dea0-4ee6-8f9e-12007d0a1ce1/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1790.483899] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af06f8a9-b95f-4d1e-9e2e-5057f394adc3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.511568] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68b6f515-5a55-4dff-98eb-517724419a6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1790.528761] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1790.528761] env[63024]: value = "task-1950975" [ 1790.528761] env[63024]: _type = "Task" [ 1790.528761] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1790.540013] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950975, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1790.848990] env[63024]: DEBUG nova.network.neutron [req-05182a11-3091-4ce5-814c-13cf12961588 req-e68d0f1f-d703-4f2f-a2b5-0f551351238b service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updated VIF entry in instance network info cache for port c8c1b0a3-d31e-4600-b1be-f31f6b4b4071. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1790.849353] env[63024]: DEBUG nova.network.neutron [req-05182a11-3091-4ce5-814c-13cf12961588 req-e68d0f1f-d703-4f2f-a2b5-0f551351238b service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updating instance_info_cache with network_info: [{"id": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "address": "fa:16:3e:30:da:7e", "network": {"id": "c6d5dbfd-7cfb-4a2d-a8dd-088f4c0b9461", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085991502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45d36e5894294d9b875bb0c69c7c2a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c1b0a3-d3", "ovs_interfaceid": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1790.995160] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1791.002962] env[63024]: DEBUG nova.scheduler.client.report [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 93 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1791.003255] env[63024]: DEBUG nova.compute.provider_tree [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 93 to 94 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1791.003461] env[63024]: DEBUG nova.compute.provider_tree [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 
tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1791.039042] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950975, 'name': ReconfigVM_Task, 'duration_secs': 0.149646} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.039301] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1791.039568] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12fc00bf-cf44-4bfd-bef7-3128bda03bc5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.045450] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1791.045450] env[63024]: value = "task-1950976" [ 1791.045450] env[63024]: _type = "Task" [ 1791.045450] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.053531] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950976, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.355257] env[63024]: DEBUG oslo_concurrency.lockutils [req-05182a11-3091-4ce5-814c-13cf12961588 req-e68d0f1f-d703-4f2f-a2b5-0f551351238b service nova] Releasing lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1791.390368] env[63024]: DEBUG nova.virt.hardware [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1791.392760] env[63024]: DEBUG nova.virt.hardware [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1791.393363] env[63024]: DEBUG nova.virt.hardware [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1791.393603] env[63024]: DEBUG nova.virt.hardware [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1791.393757] env[63024]: DEBUG nova.virt.hardware [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1791.396485] env[63024]: DEBUG nova.virt.hardware [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1791.396485] env[63024]: DEBUG nova.virt.hardware [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1791.396485] env[63024]: 
DEBUG nova.virt.hardware [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1791.396485] env[63024]: DEBUG nova.virt.hardware [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1791.396485] env[63024]: DEBUG nova.virt.hardware [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1791.396485] env[63024]: DEBUG nova.virt.hardware [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1791.396485] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25722bdd-06f5-483d-ae18-dee663bdafe6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.407945] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ac28d6-f084-4e14-945d-2fbd3acaa2ad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.423665] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:8e:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ca53b77e-33c4-4a60-b3aa-bc92763eb98e', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1791.433464] env[63024]: DEBUG oslo.service.loopingcall [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1791.433877] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1791.434212] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1d58727b-a8c9-46f4-8cde-8cd65dc78cbc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.454348] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1791.454348] env[63024]: value = "task-1950977" [ 1791.454348] env[63024]: _type = "Task" [ 1791.454348] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1791.462481] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950977, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1791.508655] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.643s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.509250] env[63024]: DEBUG nova.compute.manager [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1791.512106] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.061s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.516020] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1791.516020] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 49.362s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1791.516295] env[63024]: INFO nova.compute.claims [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1791.553536] env[63024]: INFO nova.scheduler.client.report [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Deleted allocations for instance 18444b47-476a-4ca3-9a4f-0dc58e652143 [ 1791.561815] env[63024]: DEBUG oslo_vmware.api [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950976, 'name': PowerOnVM_Task, 'duration_secs': 0.374227} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1791.561815] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1791.566233] env[63024]: DEBUG nova.compute.manager [None req-79738213-763e-4255-973f-69b5349bf4d1 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1791.567530] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e015e1-7767-46d7-8117-ea5973ecba40 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1791.966184] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950977, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.021910] env[63024]: DEBUG nova.compute.utils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1792.027025] env[63024]: DEBUG nova.compute.manager [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1792.027025] env[63024]: DEBUG nova.network.neutron [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1792.066094] env[63024]: DEBUG nova.policy [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eafd86d3b1ca4ff39f99adcf65f83476', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15259880cc744207b8b5113828a80440', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1792.070334] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fafcb525-9d6d-4370-8611-bc6302ab109d tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "18444b47-476a-4ca3-9a4f-0dc58e652143" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.756s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1792.431395] env[63024]: DEBUG nova.network.neutron [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Successfully created port: 16b0c4a3-52c9-4522-a517-2dc2dc94eac5 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1792.466579] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950977, 'name': CreateVM_Task, 'duration_secs': 0.852819} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.466806] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1792.467430] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.467595] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.467918] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1792.468190] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-298f8c38-e591-48cc-836d-364a665be9c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.473973] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1792.473973] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5240b5e5-0dfc-9b1d-e024-a011367c415e" [ 1792.473973] env[63024]: _type = "Task" [ 1792.473973] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1792.482031] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5240b5e5-0dfc-9b1d-e024-a011367c415e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1792.527088] env[63024]: DEBUG nova.compute.manager [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1792.987598] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5240b5e5-0dfc-9b1d-e024-a011367c415e, 'name': SearchDatastore_Task, 'duration_secs': 0.031702} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1792.987931] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1792.988198] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1792.988436] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1792.988612] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1792.988827] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1792.989194] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63a68a4d-6bdb-4c11-bb04-0eaa4f5288a8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1792.997327] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1792.997650] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1792.998207] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0d7fe9d-7894-428f-9ccd-016051f24954 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.006489] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1793.006489] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528403d3-04d7-2342-052e-5811db0073d2" [ 1793.006489] env[63024]: _type = "Task" [ 1793.006489] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.016523] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528403d3-04d7-2342-052e-5811db0073d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.067204] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5c0b918-0986-44a8-adcd-21d598344cf4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.076960] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6493f0b3-7901-4732-b3b0-2b7946e0d316 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.111338] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c945a78a-9e8b-4366-b16b-6099f6af66fd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.119152] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb90dca-7934-4222-a605-6ce31910bd0f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.134561] env[63024]: DEBUG nova.compute.provider_tree [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1793.518883] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528403d3-04d7-2342-052e-5811db0073d2, 'name': SearchDatastore_Task, 'duration_secs': 0.011479} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1793.520343] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f66bdfbf-4923-4ee2-a07e-23ef12fd58a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.525387] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1793.525387] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a8845c-61d1-6334-37b4-c04308a17838" [ 1793.525387] env[63024]: _type = "Task" [ 1793.525387] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.533803] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a8845c-61d1-6334-37b4-c04308a17838, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1793.540416] env[63024]: DEBUG nova.compute.manager [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1793.582634] env[63024]: DEBUG nova.virt.hardware [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1793.582926] env[63024]: DEBUG nova.virt.hardware [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1793.583262] env[63024]: DEBUG nova.virt.hardware [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1793.583510] env[63024]: DEBUG nova.virt.hardware [None 
req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1793.583693] env[63024]: DEBUG nova.virt.hardware [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1793.583859] env[63024]: DEBUG nova.virt.hardware [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1793.584136] env[63024]: DEBUG nova.virt.hardware [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1793.584336] env[63024]: DEBUG nova.virt.hardware [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1793.584520] env[63024]: DEBUG nova.virt.hardware [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1793.584717] env[63024]: DEBUG nova.virt.hardware [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1793.584909] env[63024]: DEBUG nova.virt.hardware [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1793.585902] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fa25b50-8fd5-4c3f-a543-7718aa313326 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.594707] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1640341-e48a-428c-9f38-61f696903b7a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.637989] env[63024]: DEBUG nova.scheduler.client.report [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory 
has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1793.715964] env[63024]: INFO nova.compute.manager [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Unrescuing [ 1793.716323] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "refresh_cache-b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1793.716478] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "refresh_cache-b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1793.716647] env[63024]: DEBUG nova.network.neutron [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1794.036053] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a8845c-61d1-6334-37b4-c04308a17838, 'name': SearchDatastore_Task, 'duration_secs': 0.012119} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.037031] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1794.037031] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 2dd20650-9273-432a-be28-73ccb66c721d/2dd20650-9273-432a-be28-73ccb66c721d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1794.037031] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e5c9eb46-de97-4a6f-9527-87946c45f1c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.044187] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1794.044187] env[63024]: value = "task-1950978" [ 1794.044187] env[63024]: _type = "Task" [ 1794.044187] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.052853] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950978, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.073187] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "52c17abc-78f0-417b-8675-e8d62bc8baa3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.073483] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "52c17abc-78f0-417b-8675-e8d62bc8baa3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.103777] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "e1be531c-e849-42ac-8319-5bd453a7a562" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.103923] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "e1be531c-e849-42ac-8319-5bd453a7a562" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.145877] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.631s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.146156] env[63024]: DEBUG nova.compute.manager [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1794.150025] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 51.264s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.150283] env[63024]: DEBUG nova.objects.instance [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63024) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1794.345400] env[63024]: DEBUG nova.compute.manager [req-e998162e-d6c1-47f6-8d90-f5472278e72c req-fa5e4c6e-ca0a-49d5-837f-89d01380f30c service nova] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Received event network-vif-plugged-16b0c4a3-52c9-4522-a517-2dc2dc94eac5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1794.345729] env[63024]: DEBUG oslo_concurrency.lockutils [req-e998162e-d6c1-47f6-8d90-f5472278e72c req-fa5e4c6e-ca0a-49d5-837f-89d01380f30c service nova] Acquiring lock "02db92ec-3377-406b-a95c-0022579fa75b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1794.345954] env[63024]: DEBUG oslo_concurrency.lockutils [req-e998162e-d6c1-47f6-8d90-f5472278e72c req-fa5e4c6e-ca0a-49d5-837f-89d01380f30c service nova] Lock "02db92ec-3377-406b-a95c-0022579fa75b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1794.346209] env[63024]: DEBUG oslo_concurrency.lockutils [req-e998162e-d6c1-47f6-8d90-f5472278e72c req-fa5e4c6e-ca0a-49d5-837f-89d01380f30c service nova] Lock "02db92ec-3377-406b-a95c-0022579fa75b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1794.346418] env[63024]: DEBUG nova.compute.manager [req-e998162e-d6c1-47f6-8d90-f5472278e72c req-fa5e4c6e-ca0a-49d5-837f-89d01380f30c service nova] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] No waiting events found dispatching network-vif-plugged-16b0c4a3-52c9-4522-a517-2dc2dc94eac5 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1794.346632] env[63024]: WARNING nova.compute.manager [req-e998162e-d6c1-47f6-8d90-f5472278e72c req-fa5e4c6e-ca0a-49d5-837f-89d01380f30c service nova] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Received unexpected event network-vif-plugged-16b0c4a3-52c9-4522-a517-2dc2dc94eac5 for instance with vm_state building and task_state spawning. [ 1794.554406] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950978, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.629054] env[63024]: DEBUG nova.network.neutron [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Successfully updated port: 16b0c4a3-52c9-4522-a517-2dc2dc94eac5 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1794.641956] env[63024]: DEBUG nova.network.neutron [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Updating instance_info_cache with network_info: [{"id": "83b7b8fb-a30a-4852-889a-ec6b339cc100", "address": "fa:16:3e:cf:79:7d", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap83b7b8fb-a3", "ovs_interfaceid": "83b7b8fb-a30a-4852-889a-ec6b339cc100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1794.658563] env[63024]: DEBUG nova.compute.utils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1794.665240] env[63024]: DEBUG nova.compute.manager [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1794.665501] env[63024]: DEBUG nova.network.neutron [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1794.754213] env[63024]: DEBUG nova.policy [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '893bfe0d8eef423aae6c7eb5cdc1a9e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18540818b60e4483963d14559bc5c38d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1795.055996] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950978, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.75805} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.056390] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 2dd20650-9273-432a-be28-73ccb66c721d/2dd20650-9273-432a-be28-73ccb66c721d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1795.056748] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1795.057066] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c5b1d1f-6f09-49a7-a1b0-3d86919b059a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.065858] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1795.065858] env[63024]: value = "task-1950979" [ 1795.065858] env[63024]: _type = "Task" [ 1795.065858] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.076533] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950979, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.139662] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Acquiring lock "refresh_cache-02db92ec-3377-406b-a95c-0022579fa75b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1795.139662] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Acquired lock "refresh_cache-02db92ec-3377-406b-a95c-0022579fa75b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1795.139662] env[63024]: DEBUG nova.network.neutron [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1795.145398] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "refresh_cache-b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1795.146139] env[63024]: DEBUG nova.objects.instance [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lazy-loading 'flavor' on Instance uuid b588ea21-dea0-4ee6-8f9e-12007d0a1ce1 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1795.167133] env[63024]: DEBUG nova.compute.manager [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1795.169935] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796a3055-1b90-4441-9dfc-e396d76ec3c5 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.171453] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 51.526s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.171823] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.173938] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 48.977s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1795.177442] env[63024]: INFO nova.compute.claims [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1795.202377] env[63024]: INFO nova.scheduler.client.report [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Deleted allocations for instance 3815d381-760d-40fc-98cf-8e6af287007f [ 1795.236085] env[63024]: DEBUG nova.network.neutron [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Successfully created port: 15f01f95-afb7-4613-899d-bce865e8ac82 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1795.576844] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950979, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066748} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1795.577134] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1795.578096] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268de6d9-76a8-4bd3-bea5-914e893bff9a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.600879] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Reconfiguring VM instance instance-00000033 to attach disk [datastore1] 2dd20650-9273-432a-be28-73ccb66c721d/2dd20650-9273-432a-be28-73ccb66c721d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1795.601252] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8853492-2f4e-4b4c-9f90-e98d0912ff16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.622090] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1795.622090] env[63024]: value = "task-1950980" [ 1795.622090] env[63024]: _type = "Task" [ 1795.622090] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.633642] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950980, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.653816] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-343aa2b0-cc53-4921-9a9a-b7e83901cc95 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.694794] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1795.695731] env[63024]: DEBUG nova.network.neutron [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1795.697998] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5ea91318-b8c7-489d-9cf0-f52db6084f76 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.706027] env[63024]: DEBUG oslo_vmware.api [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1795.706027] env[63024]: value = "task-1950981" [ 1795.706027] env[63024]: _type = "Task" [ 1795.706027] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1795.712906] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3636e943-2d61-49f0-83a8-71a4eaa33724 tempest-TenantUsagesTestJSON-685471113 tempest-TenantUsagesTestJSON-685471113-project-member] Lock "3815d381-760d-40fc-98cf-8e6af287007f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 55.782s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1795.717157] env[63024]: DEBUG oslo_vmware.api [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950981, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1795.853907] env[63024]: DEBUG nova.network.neutron [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Updating instance_info_cache with network_info: [{"id": "16b0c4a3-52c9-4522-a517-2dc2dc94eac5", "address": "fa:16:3e:e5:3b:13", "network": {"id": "ef2e6069-4a95-4a79-9e36-681802b1c9a9", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-434138680-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15259880cc744207b8b5113828a80440", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27e0a333-0cad-496c-8e6e-37a2edc97ac4", "external-id": "nsx-vlan-transportzone-83", "segmentation_id": 83, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b0c4a3-52", "ovs_interfaceid": "16b0c4a3-52c9-4522-a517-2dc2dc94eac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1796.134158] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950980, 'name': ReconfigVM_Task, 'duration_secs': 0.511777} completed 
successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.134460] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Reconfigured VM instance instance-00000033 to attach disk [datastore1] 2dd20650-9273-432a-be28-73ccb66c721d/2dd20650-9273-432a-be28-73ccb66c721d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1796.135162] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7cf531fd-8ddc-4df7-a5cb-cd0b354793ff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.141595] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1796.141595] env[63024]: value = "task-1950982" [ 1796.141595] env[63024]: _type = "Task" [ 1796.141595] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.150120] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950982, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.202234] env[63024]: DEBUG nova.compute.manager [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1796.214774] env[63024]: DEBUG oslo_vmware.api [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950981, 'name': PowerOffVM_Task, 'duration_secs': 0.225558} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.214914] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1796.220493] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Reconfiguring VM instance instance-0000002f to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1796.220802] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4c5d8062-987f-45b6-9d85-d960497cbbc4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.245660] env[63024]: DEBUG oslo_vmware.api [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1796.245660] env[63024]: value = "task-1950983" [ 1796.245660] env[63024]: _type = "Task" [ 1796.245660] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.247806] env[63024]: DEBUG nova.virt.hardware [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1796.248040] env[63024]: DEBUG nova.virt.hardware [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1796.248200] env[63024]: DEBUG nova.virt.hardware [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1796.248569] env[63024]: DEBUG nova.virt.hardware [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 1796.248734] env[63024]: DEBUG nova.virt.hardware [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1796.248882] env[63024]: DEBUG nova.virt.hardware [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1796.249110] env[63024]: DEBUG nova.virt.hardware [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1796.249272] env[63024]: DEBUG nova.virt.hardware [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1796.249438] env[63024]: DEBUG nova.virt.hardware [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1796.249599] env[63024]: DEBUG nova.virt.hardware [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1796.249819] env[63024]: DEBUG nova.virt.hardware [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1796.250756] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ad6891-7bde-4849-951f-000da32ea60c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.264762] env[63024]: DEBUG oslo_vmware.api [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950983, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.267923] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c9874b-b979-4905-810f-9f1df818094d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.358924] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Releasing lock "refresh_cache-02db92ec-3377-406b-a95c-0022579fa75b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1796.359277] env[63024]: DEBUG nova.compute.manager [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Instance network_info: |[{"id": "16b0c4a3-52c9-4522-a517-2dc2dc94eac5", "address": "fa:16:3e:e5:3b:13", "network": {"id": "ef2e6069-4a95-4a79-9e36-681802b1c9a9", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-434138680-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15259880cc744207b8b5113828a80440", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27e0a333-0cad-496c-8e6e-37a2edc97ac4", "external-id": "nsx-vlan-transportzone-83", "segmentation_id": 83, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b0c4a3-52", "ovs_interfaceid": "16b0c4a3-52c9-4522-a517-2dc2dc94eac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1796.359929] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e5:3b:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '27e0a333-0cad-496c-8e6e-37a2edc97ac4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '16b0c4a3-52c9-4522-a517-2dc2dc94eac5', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1796.368098] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Creating folder: Project (15259880cc744207b8b5113828a80440). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1796.368430] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-48a9f693-a717-4539-bdd0-41e9c00dbe8e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.376764] env[63024]: DEBUG nova.compute.manager [req-73081980-58a1-4fde-aa90-523ed830a9db req-677e73cb-6d28-48b2-84b4-9488a65d35b3 service nova] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Received event network-changed-16b0c4a3-52c9-4522-a517-2dc2dc94eac5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1796.377062] env[63024]: DEBUG nova.compute.manager [req-73081980-58a1-4fde-aa90-523ed830a9db req-677e73cb-6d28-48b2-84b4-9488a65d35b3 service nova] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Refreshing instance network info cache due to event network-changed-16b0c4a3-52c9-4522-a517-2dc2dc94eac5. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1796.377277] env[63024]: DEBUG oslo_concurrency.lockutils [req-73081980-58a1-4fde-aa90-523ed830a9db req-677e73cb-6d28-48b2-84b4-9488a65d35b3 service nova] Acquiring lock "refresh_cache-02db92ec-3377-406b-a95c-0022579fa75b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1796.377324] env[63024]: DEBUG oslo_concurrency.lockutils [req-73081980-58a1-4fde-aa90-523ed830a9db req-677e73cb-6d28-48b2-84b4-9488a65d35b3 service nova] Acquired lock "refresh_cache-02db92ec-3377-406b-a95c-0022579fa75b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1796.377934] env[63024]: DEBUG nova.network.neutron [req-73081980-58a1-4fde-aa90-523ed830a9db req-677e73cb-6d28-48b2-84b4-9488a65d35b3 service nova] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Refreshing network info cache for port 16b0c4a3-52c9-4522-a517-2dc2dc94eac5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1796.383061] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Created folder: Project (15259880cc744207b8b5113828a80440) in parent group-v401959. [ 1796.383244] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Creating folder: Instances. Parent ref: group-v402121. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1796.386314] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca093268-f548-4f85-ab92-c9cb033fdfec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.399069] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Created folder: Instances in parent group-v402121. 
[ 1796.399330] env[63024]: DEBUG oslo.service.loopingcall [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1796.401987] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1796.402607] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c547a2d1-a058-4367-8973-2f3c68e9e642 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.425816] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1796.425816] env[63024]: value = "task-1950986" [ 1796.425816] env[63024]: _type = "Task" [ 1796.425816] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.434469] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950986, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.651238] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950982, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.716273] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e149b1e-9aee-4814-89c5-0c240c491668 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.724363] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d0b5ad4-2d73-48a0-b957-ad8b638c4bc6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.762129] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9ab0f6-2584-40fe-b461-90f6b235988a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.769774] env[63024]: DEBUG oslo_vmware.api [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950983, 'name': ReconfigVM_Task, 'duration_secs': 0.312954} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1796.771777] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Reconfigured VM instance instance-0000002f to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1796.771967] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1796.772307] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3080d296-1b0b-4f54-b564-db708f04edf9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.775070] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf3b2d0-f283-47af-80a0-32bc275804e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.788865] env[63024]: DEBUG nova.compute.provider_tree [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1796.791327] env[63024]: DEBUG oslo_vmware.api [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1796.791327] env[63024]: value = "task-1950987" [ 1796.791327] env[63024]: _type = "Task" [ 1796.791327] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1796.799682] env[63024]: DEBUG oslo_vmware.api [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950987, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1796.935384] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950986, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.048468] env[63024]: DEBUG nova.network.neutron [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Successfully updated port: 15f01f95-afb7-4613-899d-bce865e8ac82 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1797.152022] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950982, 'name': Rename_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.250808] env[63024]: DEBUG nova.network.neutron [req-73081980-58a1-4fde-aa90-523ed830a9db req-677e73cb-6d28-48b2-84b4-9488a65d35b3 service nova] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Updated VIF entry in instance network info cache for port 16b0c4a3-52c9-4522-a517-2dc2dc94eac5. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1797.251915] env[63024]: DEBUG nova.network.neutron [req-73081980-58a1-4fde-aa90-523ed830a9db req-677e73cb-6d28-48b2-84b4-9488a65d35b3 service nova] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Updating instance_info_cache with network_info: [{"id": "16b0c4a3-52c9-4522-a517-2dc2dc94eac5", "address": "fa:16:3e:e5:3b:13", "network": {"id": "ef2e6069-4a95-4a79-9e36-681802b1c9a9", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-434138680-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15259880cc744207b8b5113828a80440", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "27e0a333-0cad-496c-8e6e-37a2edc97ac4", "external-id": "nsx-vlan-transportzone-83", "segmentation_id": 83, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16b0c4a3-52", "ovs_interfaceid": "16b0c4a3-52c9-4522-a517-2dc2dc94eac5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.293574] env[63024]: DEBUG nova.scheduler.client.report [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1797.306456] env[63024]: DEBUG oslo_vmware.api [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1950987, 'name': PowerOnVM_Task, 'duration_secs': 0.371234} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.306456] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1797.306655] env[63024]: DEBUG nova.compute.manager [None req-b2979e49-67db-48b1-a73c-81e129a08c33 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1797.307779] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc7242b9-a3ec-4d08-85a2-937c866751c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.440085] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950986, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.550413] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "refresh_cache-6156ce17-3f29-487a-afc5-2fa0fb7f114c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1797.550695] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "refresh_cache-6156ce17-3f29-487a-afc5-2fa0fb7f114c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1797.550771] env[63024]: DEBUG nova.network.neutron [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1797.658410] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950982, 'name': Rename_Task, 'duration_secs': 1.230746} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1797.658410] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1797.658682] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6750293d-b551-49c4-96a5-680b3b55a5e1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.665128] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1797.665128] env[63024]: value = "task-1950988" [ 1797.665128] env[63024]: _type = "Task" [ 1797.665128] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.675041] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950988, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1797.756199] env[63024]: DEBUG oslo_concurrency.lockutils [req-73081980-58a1-4fde-aa90-523ed830a9db req-677e73cb-6d28-48b2-84b4-9488a65d35b3 service nova] Releasing lock "refresh_cache-02db92ec-3377-406b-a95c-0022579fa75b" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1797.802433] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.628s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.802950] env[63024]: DEBUG nova.compute.manager [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1797.807056] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 49.939s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.807056] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.810024] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.941s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.810024] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.810975] env[63024]: DEBUG oslo_concurrency.lockutils [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.327s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.811124] env[63024]: DEBUG oslo_concurrency.lockutils [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.811541] env[63024]: INFO nova.compute.manager [None req-770550c8-eb49-4494-9d31-dc393d4d133b tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Successfully reverted task state from rebuilding on failure for instance. 
[ 1797.817224] env[63024]: DEBUG oslo_concurrency.lockutils [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.772s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.820018] env[63024]: DEBUG oslo_concurrency.lockutils [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.820018] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.559s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.820018] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1797.821195] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.412s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1797.822962] env[63024]: INFO nova.compute.claims [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1797.865705] env[63024]: INFO nova.scheduler.client.report [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Deleted allocations for instance 9679a1a2-b003-4a60-a812-8b3a9b5f545f [ 1797.867485] env[63024]: INFO nova.scheduler.client.report [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Deleted allocations for instance 9cf45c3a-2a74-4f8e-8817-47bbd748a44b [ 1797.895257] env[63024]: INFO nova.scheduler.client.report [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted allocations for instance 8edc24d6-9073-4836-b14b-422df3ac1b88 [ 1797.940061] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950986, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.085910] env[63024]: DEBUG nova.network.neutron [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1798.177403] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950988, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.226384] env[63024]: DEBUG nova.network.neutron [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Updating instance_info_cache with network_info: [{"id": "15f01f95-afb7-4613-899d-bce865e8ac82", "address": "fa:16:3e:15:51:3a", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15f01f95-af", "ovs_interfaceid": "15f01f95-afb7-4613-899d-bce865e8ac82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1798.330388] env[63024]: DEBUG nova.compute.utils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1798.330388] env[63024]: DEBUG nova.compute.manager [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1798.330388] env[63024]: DEBUG nova.network.neutron [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1798.360180] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa196c0-94c6-40cd-9df6-630a7786f6a4 tempest-ServerActionsV293TestJSON-1451067865 tempest-ServerActionsV293TestJSON-1451067865-project-member] Lock "49eb6292-012a-4296-aff8-9c460866a602" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.159s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.386878] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9efe99-5e7f-465f-b857-ad9d14dd2357 tempest-ServersV294TestFqdnHostnames-1775150212 tempest-ServersV294TestFqdnHostnames-1775150212-project-member] Lock "9679a1a2-b003-4a60-a812-8b3a9b5f545f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.123s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.386878] env[63024]: DEBUG oslo_concurrency.lockutils [None req-501bb556-91b2-49fd-b71a-0b980b59405d tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "9cf45c3a-2a74-4f8e-8817-47bbd748a44b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.772s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.405174] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ebfa6214-a527-4ee8-ac0d-86a78da86ba9 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "8edc24d6-9073-4836-b14b-422df3ac1b88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.313s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.409624] env[63024]: DEBUG nova.policy [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '17fb87e2577e46858934f157a394a590', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9981ec11228244fd8b75ee951a940c85', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1798.438530] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950986, 'name': CreateVM_Task, 'duration_secs': 1.53366} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.438727] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1798.439517] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.439703] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.440062] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1798.440361] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f7bf2ff-11e5-4c9a-a062-2115ce2ca43c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.446608] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Waiting for the task: (returnval){ [ 1798.446608] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520eed81-6159-481e-969f-a8675e96cdf0" [ 1798.446608] env[63024]: _type = "Task" [ 1798.446608] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.455166] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520eed81-6159-481e-969f-a8675e96cdf0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.508050] env[63024]: DEBUG nova.compute.manager [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Received event network-vif-plugged-15f01f95-afb7-4613-899d-bce865e8ac82 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1798.508288] env[63024]: DEBUG oslo_concurrency.lockutils [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] Acquiring lock "6156ce17-3f29-487a-afc5-2fa0fb7f114c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1798.508534] env[63024]: DEBUG oslo_concurrency.lockutils [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] Lock "6156ce17-3f29-487a-afc5-2fa0fb7f114c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1798.508766] env[63024]: DEBUG oslo_concurrency.lockutils [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] Lock "6156ce17-3f29-487a-afc5-2fa0fb7f114c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1798.508890] env[63024]: DEBUG nova.compute.manager [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] No waiting events found dispatching network-vif-plugged-15f01f95-afb7-4613-899d-bce865e8ac82 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1798.508994] env[63024]: WARNING nova.compute.manager [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Received unexpected event network-vif-plugged-15f01f95-afb7-4613-899d-bce865e8ac82 for instance with vm_state building and task_state spawning. [ 1798.509259] env[63024]: DEBUG nova.compute.manager [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Received event network-changed-15f01f95-afb7-4613-899d-bce865e8ac82 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1798.509385] env[63024]: DEBUG nova.compute.manager [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Refreshing instance network info cache due to event network-changed-15f01f95-afb7-4613-899d-bce865e8ac82. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1798.509551] env[63024]: DEBUG oslo_concurrency.lockutils [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] Acquiring lock "refresh_cache-6156ce17-3f29-487a-afc5-2fa0fb7f114c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.681146] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950988, 'name': PowerOnVM_Task, 'duration_secs': 0.617252} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.681146] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1798.681329] env[63024]: DEBUG nova.compute.manager [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1798.682443] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-263de9fd-08e2-4dbf-ae01-e5680fca964e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.729332] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "refresh_cache-6156ce17-3f29-487a-afc5-2fa0fb7f114c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.729654] env[63024]: DEBUG nova.compute.manager [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Instance network_info: |[{"id": "15f01f95-afb7-4613-899d-bce865e8ac82", "address": "fa:16:3e:15:51:3a", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15f01f95-af", "ovs_interfaceid": "15f01f95-afb7-4613-899d-bce865e8ac82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1798.729960] env[63024]: DEBUG oslo_concurrency.lockutils [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] Acquired lock "refresh_cache-6156ce17-3f29-487a-afc5-2fa0fb7f114c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.730150] env[63024]: DEBUG nova.network.neutron [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Refreshing network info cache for port 15f01f95-afb7-4613-899d-bce865e8ac82 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1798.731448] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:15:51:3a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec763be6-4041-4651-8fd7-3820cf0ab86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15f01f95-afb7-4613-899d-bce865e8ac82', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1798.745509] env[63024]: DEBUG oslo.service.loopingcall [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1798.746926] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1798.747249] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ad34b569-a711-45af-ae65-7cae2a96cdca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.770629] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1798.770629] env[63024]: value = "task-1950989" [ 1798.770629] env[63024]: _type = "Task" [ 1798.770629] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.779202] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950989, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.838384] env[63024]: DEBUG nova.compute.utils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1798.876413] env[63024]: DEBUG nova.network.neutron [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Successfully created port: f4b23f8c-5413-42ca-abeb-eda669ea2fe5 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1798.964528] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520eed81-6159-481e-969f-a8675e96cdf0, 'name': SearchDatastore_Task, 'duration_secs': 0.009935} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.964844] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1798.965150] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1798.965330] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1798.965471] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1798.965648] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1798.965919] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-d0b39e8a-0ed4-4e46-ac9b-bbaae4dd9f58 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.977352] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1798.977352] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1798.977352] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bab980c-bc2c-4266-8d01-010cc3c4b0f8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.982014] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Waiting for the task: (returnval){ [ 1798.982014] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521ef82b-bd90-a197-fdbc-a5f1d69aac87" [ 1798.982014] env[63024]: _type = "Task" [ 1798.982014] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.995549] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521ef82b-bd90-a197-fdbc-a5f1d69aac87, 'name': SearchDatastore_Task, 'duration_secs': 0.009078} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.996908] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3df4b4b5-054f-42fd-b264-987dd8a21c0c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.004277] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Waiting for the task: (returnval){ [ 1799.004277] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526b54b2-dbc1-c7fd-894e-b6451227baae" [ 1799.004277] env[63024]: _type = "Task" [ 1799.004277] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.019010] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526b54b2-dbc1-c7fd-894e-b6451227baae, 'name': SearchDatastore_Task, 'duration_secs': 0.013228} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.019311] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.019618] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 02db92ec-3377-406b-a95c-0022579fa75b/02db92ec-3377-406b-a95c-0022579fa75b.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1799.019835] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bcfa3ee8-a7f7-4704-a457-8e60c05a7221 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.027144] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Waiting for the task: (returnval){ [ 1799.027144] env[63024]: value = "task-1950990" [ 1799.027144] env[63024]: _type = "Task" [ 1799.027144] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.035595] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1950990, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.196067] env[63024]: INFO nova.compute.manager [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] bringing vm to original state: 'stopped' [ 1799.285659] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1950989, 'name': CreateVM_Task, 'duration_secs': 0.422298} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.288845] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1799.289830] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.290072] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.290443] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1799.290680] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10997c59-27fd-4c11-ae45-8974a5f55260 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.296624] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1799.296624] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5226c0e2-210f-d93e-070e-d8cd7f8a81fd" [ 1799.296624] env[63024]: _type = "Task" [ 1799.296624] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.307395] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5226c0e2-210f-d93e-070e-d8cd7f8a81fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.350024] env[63024]: DEBUG nova.compute.manager [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1799.492548] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4804ed92-9c5a-4081-817d-27a5b6d3f814 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.503540] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a405bb6b-1772-4aba-8db1-2695729a9352 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.549293] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8587505-18d5-45fa-b6b2-ff6eaeb610be {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.557898] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1950990, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529548} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.560106] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 02db92ec-3377-406b-a95c-0022579fa75b/02db92ec-3377-406b-a95c-0022579fa75b.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1799.560346] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1799.560632] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7030fe1b-1b57-470c-87de-e9b29ea044e0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.563529] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-279d49f6-f96c-4650-a2ce-803ebd7bf896 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.587346] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Waiting for the task: (returnval){ [ 1799.587346] env[63024]: value = "task-1950991" [ 1799.587346] env[63024]: _type = "Task" [ 1799.587346] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.587346] env[63024]: DEBUG nova.compute.provider_tree [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1799.597485] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1950991, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1799.807971] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5226c0e2-210f-d93e-070e-d8cd7f8a81fd, 'name': SearchDatastore_Task, 'duration_secs': 0.055161} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1799.807971] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1799.807971] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1799.807971] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1799.808219] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1799.808219] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1799.808463] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-714fe6c9-f674-43fd-9f9c-8c6dfbf6bc77 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.818124] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1799.818124] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1799.818240] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71342576-fef0-4b40-b972-3df5ac91d014 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1799.823991] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1799.823991] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527f62a1-9503-3a33-e2d0-02e17d5262e1" [ 1799.823991] env[63024]: _type = "Task" [ 1799.823991] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1799.831252] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527f62a1-9503-3a33-e2d0-02e17d5262e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.087166] env[63024]: DEBUG nova.network.neutron [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Updated VIF entry in instance network info cache for port 15f01f95-afb7-4613-899d-bce865e8ac82. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1800.087532] env[63024]: DEBUG nova.network.neutron [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Updating instance_info_cache with network_info: [{"id": "15f01f95-afb7-4613-899d-bce865e8ac82", "address": "fa:16:3e:15:51:3a", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15f01f95-af", "ovs_interfaceid": "15f01f95-afb7-4613-899d-bce865e8ac82", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1800.093919] env[63024]: DEBUG nova.scheduler.client.report [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1800.106385] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1950991, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.231126} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.113364] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1800.113364] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a0fc3a5-bb05-4a9d-a200-b6a5d89a02fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.141033] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] 02db92ec-3377-406b-a95c-0022579fa75b/02db92ec-3377-406b-a95c-0022579fa75b.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1800.141033] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4eb8b7c2-6637-4a45-8223-68a359ecc404 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.162649] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Waiting for the task: (returnval){ [ 1800.162649] env[63024]: value = "task-1950992" [ 1800.162649] env[63024]: _type = "Task" [ 1800.162649] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.172161] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1950992, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.206519] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "2dd20650-9273-432a-be28-73ccb66c721d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1800.206800] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "2dd20650-9273-432a-be28-73ccb66c721d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.208729] env[63024]: DEBUG nova.compute.manager [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1800.208729] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-656a6816-2b44-44a3-8591-1b453ee037bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.215129] env[63024]: DEBUG nova.compute.manager [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63024) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1800.337043] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527f62a1-9503-3a33-e2d0-02e17d5262e1, 'name': SearchDatastore_Task, 'duration_secs': 0.020728} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.338411] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8723523d-8137-453b-a256-2b374eb807f9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.345564] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1800.345564] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fc20cf-1892-299b-0ce0-fbea512c30a7" [ 1800.345564] env[63024]: _type = "Task" [ 1800.345564] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.356718] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fc20cf-1892-299b-0ce0-fbea512c30a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.362458] env[63024]: DEBUG nova.compute.manager [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1800.400812] env[63024]: DEBUG nova.virt.hardware [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T11:07:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='1147934672',id=32,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1590159141',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1800.401200] env[63024]: DEBUG nova.virt.hardware [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1800.401424] env[63024]: DEBUG nova.virt.hardware [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1800.401686] env[63024]: DEBUG nova.virt.hardware [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1800.401887] env[63024]: DEBUG nova.virt.hardware [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1800.402162] env[63024]: DEBUG nova.virt.hardware [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 
tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1800.402452] env[63024]: DEBUG nova.virt.hardware [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1800.402702] env[63024]: DEBUG nova.virt.hardware [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1800.402978] env[63024]: DEBUG nova.virt.hardware [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1800.403262] env[63024]: DEBUG nova.virt.hardware [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1800.403527] env[63024]: DEBUG nova.virt.hardware [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1800.404659] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704006e2-3554-43cd-acf6-82c0bf1d9be6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.413853] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73615156-aa1f-4237-819d-8c44a2c48d87 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.595033] env[63024]: DEBUG oslo_concurrency.lockutils [req-5a883763-227e-4d93-aaba-8fb8880391a2 req-db153f51-ec0c-4a1b-89f0-fb50d5a627eb service nova] Releasing lock "refresh_cache-6156ce17-3f29-487a-afc5-2fa0fb7f114c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.600530] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.779s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1800.601407] env[63024]: DEBUG nova.compute.manager [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 
tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1800.608689] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.791s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1800.608689] env[63024]: INFO nova.compute.claims [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1800.677757] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1950992, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.724754] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1800.724754] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31928db3-4fe7-4a3d-8748-0254b9894d35 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.730458] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1800.730458] env[63024]: value = "task-1950993" [ 1800.730458] env[63024]: _type = "Task" [ 1800.730458] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.738998] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950993, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1800.866486] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fc20cf-1892-299b-0ce0-fbea512c30a7, 'name': SearchDatastore_Task, 'duration_secs': 0.023162} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1800.867939] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1800.867939] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 6156ce17-3f29-487a-afc5-2fa0fb7f114c/6156ce17-3f29-487a-afc5-2fa0fb7f114c.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1800.867939] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57320230-0104-4238-87fa-0f4ad130b9c6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1800.875861] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1800.875861] env[63024]: value = "task-1950994" [ 1800.875861] env[63024]: _type = "Task" [ 1800.875861] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1800.886691] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950994, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.116802] env[63024]: DEBUG nova.compute.utils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1801.118756] env[63024]: DEBUG nova.compute.manager [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1801.119120] env[63024]: DEBUG nova.network.neutron [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1801.173741] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1950992, 'name': ReconfigVM_Task, 'duration_secs': 0.839101} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.174013] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Reconfigured VM instance instance-00000036 to attach disk [datastore1] 02db92ec-3377-406b-a95c-0022579fa75b/02db92ec-3377-406b-a95c-0022579fa75b.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1801.174690] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd50a8c3-2545-48eb-8003-2abe79392b66 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.182138] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Waiting for the task: (returnval){ [ 1801.182138] env[63024]: value = "task-1950995" [ 1801.182138] env[63024]: _type = "Task" [ 1801.182138] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.192364] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1950995, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.242997] env[63024]: DEBUG oslo_vmware.api [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1950993, 'name': PowerOffVM_Task, 'duration_secs': 0.188546} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.243389] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1801.245806] env[63024]: DEBUG nova.compute.manager [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1801.245806] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b5a219-c54f-4c5b-ac2b-646b0148a822 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.294097] env[63024]: DEBUG nova.policy [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fc112b4851e4dbeac3a69409e7bf98e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1886be852b01400aaf7a31c8fe5d4d7a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1801.330731] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.331051] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.389592] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950994, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493399} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.390490] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 6156ce17-3f29-487a-afc5-2fa0fb7f114c/6156ce17-3f29-487a-afc5-2fa0fb7f114c.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1801.390490] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1801.390572] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eeeebed6-1382-4b8a-911d-091159f9f248 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.401914] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1801.401914] env[63024]: value = "task-1950996" [ 1801.401914] env[63024]: _type = "Task" [ 1801.401914] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.409414] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950996, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.429320] env[63024]: DEBUG nova.compute.manager [req-a1b126ca-fa90-4116-8f0d-290e0738ce12 req-a93dd3a5-d44e-473f-929d-55c79282e196 service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Received event network-vif-plugged-f4b23f8c-5413-42ca-abeb-eda669ea2fe5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1801.429320] env[63024]: DEBUG oslo_concurrency.lockutils [req-a1b126ca-fa90-4116-8f0d-290e0738ce12 req-a93dd3a5-d44e-473f-929d-55c79282e196 service nova] Acquiring lock "9267e5e4-732d-47f1-8a30-d926a1269fb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1801.429320] env[63024]: DEBUG oslo_concurrency.lockutils [req-a1b126ca-fa90-4116-8f0d-290e0738ce12 req-a93dd3a5-d44e-473f-929d-55c79282e196 service nova] Lock "9267e5e4-732d-47f1-8a30-d926a1269fb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1801.429320] env[63024]: DEBUG oslo_concurrency.lockutils [req-a1b126ca-fa90-4116-8f0d-290e0738ce12 req-a93dd3a5-d44e-473f-929d-55c79282e196 service nova] Lock "9267e5e4-732d-47f1-8a30-d926a1269fb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.429320] env[63024]: DEBUG nova.compute.manager [req-a1b126ca-fa90-4116-8f0d-290e0738ce12 req-a93dd3a5-d44e-473f-929d-55c79282e196 service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] No waiting events found dispatching network-vif-plugged-f4b23f8c-5413-42ca-abeb-eda669ea2fe5 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1801.429597] env[63024]: WARNING nova.compute.manager [req-a1b126ca-fa90-4116-8f0d-290e0738ce12 req-a93dd3a5-d44e-473f-929d-55c79282e196 service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Received unexpected event network-vif-plugged-f4b23f8c-5413-42ca-abeb-eda669ea2fe5 for instance with vm_state building and task_state spawning. [ 1801.488729] env[63024]: DEBUG nova.network.neutron [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Successfully updated port: f4b23f8c-5413-42ca-abeb-eda669ea2fe5 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1801.623906] env[63024]: DEBUG nova.compute.manager [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1801.696625] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1950995, 'name': Rename_Task, 'duration_secs': 0.266982} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.697367] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1801.697626] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2f44f2ec-5035-4597-a96e-870caadffde5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.706198] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Waiting for the task: (returnval){ [ 1801.706198] env[63024]: value = "task-1950997" [ 1801.706198] env[63024]: _type = "Task" [ 1801.706198] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.714381] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1950997, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.763016] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "2dd20650-9273-432a-be28-73ccb66c721d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.554s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1801.908671] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950996, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065506} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1801.909015] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1801.909935] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa87887b-15d5-4595-935f-fb36909c21a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.936026] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] 6156ce17-3f29-487a-afc5-2fa0fb7f114c/6156ce17-3f29-487a-afc5-2fa0fb7f114c.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1801.941491] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e546c69-1d10-4a2c-9dda-24f10dec7085 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1801.962166] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1801.962166] env[63024]: value = "task-1950998" [ 1801.962166] env[63024]: _type = "Task" [ 1801.962166] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1801.970695] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950998, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1801.992801] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "refresh_cache-9267e5e4-732d-47f1-8a30-d926a1269fb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1801.992974] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquired lock "refresh_cache-9267e5e4-732d-47f1-8a30-d926a1269fb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1801.993337] env[63024]: DEBUG nova.network.neutron [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1802.216623] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1950997, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.217971] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126609df-af03-4c78-875c-51653e9361a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.225886] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f6597a-e2e5-486c-9318-4ffa15063528 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.257452] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f544fbe-2041-4f51-8bb9-f0a84da81f94 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.273046] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1130278c-acd1-44f1-93d7-ef1cb112fcab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.278902] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1802.279763] env[63024]: DEBUG nova.network.neutron [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Successfully created port: 611e1e79-ffb8-4ba9-8718-b57360eaa492 {{(pid=63024) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 1802.291426] env[63024]: DEBUG nova.compute.provider_tree [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1802.475881] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950998, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1802.591960] env[63024]: DEBUG nova.network.neutron [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1802.638277] env[63024]: DEBUG nova.compute.manager [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1802.673802] env[63024]: DEBUG nova.virt.hardware [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1802.673802] env[63024]: DEBUG nova.virt.hardware [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1802.673802] env[63024]: DEBUG nova.virt.hardware [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1802.673802] env[63024]: DEBUG nova.virt.hardware [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
1802.673802] env[63024]: DEBUG nova.virt.hardware [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1802.673802] env[63024]: DEBUG nova.virt.hardware [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1802.673802] env[63024]: DEBUG nova.virt.hardware [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1802.673802] env[63024]: DEBUG nova.virt.hardware [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1802.673802] env[63024]: DEBUG nova.virt.hardware [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1802.673802] env[63024]: DEBUG nova.virt.hardware [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1802.673802] env[63024]: DEBUG nova.virt.hardware [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1802.674450] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59b07d8-c1d7-418f-b938-c0db73cc6847 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.683566] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b0613f-8bb3-4434-b537-a8d3ec11e225 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.715845] env[63024]: DEBUG oslo_vmware.api [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1950997, 'name': PowerOnVM_Task, 'duration_secs': 0.850278} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.716150] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1802.716354] env[63024]: INFO nova.compute.manager [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Took 9.18 seconds to spawn the instance on the hypervisor. [ 1802.716871] env[63024]: DEBUG nova.compute.manager [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1802.717687] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa48e55-4688-48ce-8d7f-757bc328bb7e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.795136] env[63024]: DEBUG nova.scheduler.client.report [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1802.982201] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950998, 'name': ReconfigVM_Task, 'duration_secs': 0.632864} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1802.982484] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Reconfigured VM instance instance-00000037 to attach disk [datastore1] 6156ce17-3f29-487a-afc5-2fa0fb7f114c/6156ce17-3f29-487a-afc5-2fa0fb7f114c.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1802.983156] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-63f105b7-821f-4e4d-acf8-b824feeaeecf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1802.990062] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1802.990062] env[63024]: value = "task-1950999" [ 1802.990062] env[63024]: _type = "Task" [ 1802.990062] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1803.779581] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.173s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1803.779581] env[63024]: DEBUG nova.compute.manager [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1803.783033] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950999, 'name': Rename_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1803.783033] env[63024]: WARNING oslo_vmware.common.loopingcall [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] task run outlasted interval by 0.29181 sec [ 1803.784568] env[63024]: DEBUG nova.network.neutron [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Updating instance_info_cache with network_info: [{"id": "f4b23f8c-5413-42ca-abeb-eda669ea2fe5", "address": "fa:16:3e:05:a4:75", "network": {"id": "1c8709f7-097f-4437-bae1-c45d9a4d02f1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1121041191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9981ec11228244fd8b75ee951a940c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4b23f8c-54", "ovs_interfaceid": "f4b23f8c-5413-42ca-abeb-eda669ea2fe5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1803.785548] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.597s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1803.786933] env[63024]: INFO nova.compute.claims [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1803.790596] env[63024]: INFO nova.compute.manager [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Took 67.62 seconds to build instance. [ 1803.802617] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950999, 'name': Rename_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.051369] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "01b8072a-4483-4932-8294-7e5b48e6b203" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.051630] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "01b8072a-4483-4932-8294-7e5b48e6b203" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.192102] env[63024]: DEBUG nova.compute.manager [req-6dd60a69-98a0-4c36-9e75-a3b875e0c61d req-9041a11a-98f8-4457-9579-c373bcaa76f8 service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Received event network-changed-f4b23f8c-5413-42ca-abeb-eda669ea2fe5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1804.192352] env[63024]: DEBUG nova.compute.manager [req-6dd60a69-98a0-4c36-9e75-a3b875e0c61d req-9041a11a-98f8-4457-9579-c373bcaa76f8 service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Refreshing instance network info cache due to event network-changed-f4b23f8c-5413-42ca-abeb-eda669ea2fe5. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1804.192551] env[63024]: DEBUG oslo_concurrency.lockutils [req-6dd60a69-98a0-4c36-9e75-a3b875e0c61d req-9041a11a-98f8-4457-9579-c373bcaa76f8 service nova] Acquiring lock "refresh_cache-9267e5e4-732d-47f1-8a30-d926a1269fb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.284180] env[63024]: DEBUG nova.compute.utils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1804.287146] env[63024]: DEBUG nova.compute.manager [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1804.291019] env[63024]: DEBUG nova.network.neutron [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1804.291019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Releasing lock "refresh_cache-9267e5e4-732d-47f1-8a30-d926a1269fb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1804.291019] env[63024]: DEBUG nova.compute.manager [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Instance network_info: |[{"id": "f4b23f8c-5413-42ca-abeb-eda669ea2fe5", "address": "fa:16:3e:05:a4:75", "network": {"id": "1c8709f7-097f-4437-bae1-c45d9a4d02f1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1121041191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9981ec11228244fd8b75ee951a940c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4b23f8c-54", "ovs_interfaceid": "f4b23f8c-5413-42ca-abeb-eda669ea2fe5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1804.294743] env[63024]: DEBUG oslo_concurrency.lockutils [req-6dd60a69-98a0-4c36-9e75-a3b875e0c61d req-9041a11a-98f8-4457-9579-c373bcaa76f8 service nova] Acquired lock "refresh_cache-9267e5e4-732d-47f1-8a30-d926a1269fb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.296114] env[63024]: DEBUG nova.network.neutron [req-6dd60a69-98a0-4c36-9e75-a3b875e0c61d req-9041a11a-98f8-4457-9579-c373bcaa76f8 service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Refreshing network info cache for port f4b23f8c-5413-42ca-abeb-eda669ea2fe5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1804.296960] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:a4:75', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0248a27a-1d7f-4195-987b-06bfc8467347', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f4b23f8c-5413-42ca-abeb-eda669ea2fe5', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1804.308837] env[63024]: DEBUG oslo.service.loopingcall [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1804.310612] env[63024]: DEBUG oslo_concurrency.lockutils [None req-52b23273-9604-40d2-8893-5309261eb5b7 tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Lock "02db92ec-3377-406b-a95c-0022579fa75b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.151s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.313186] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1804.315400] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-654ea04f-6c13-4763-a255-6efc878f7347 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.336580] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950999, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.342617] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1804.342617] env[63024]: value = "task-1951000" [ 1804.342617] env[63024]: _type = "Task" [ 1804.342617] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.352570] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951000, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.657219] env[63024]: DEBUG nova.policy [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a59445f732e4801b5e6020b488adb59', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5577b40f56af44eebd47761192e9510f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1804.657219] env[63024]: DEBUG oslo_concurrency.lockutils [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "2dd20650-9273-432a-be28-73ccb66c721d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.657398] env[63024]: DEBUG oslo_concurrency.lockutils [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "2dd20650-9273-432a-be28-73ccb66c721d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.657594] env[63024]: DEBUG oslo_concurrency.lockutils [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "2dd20650-9273-432a-be28-73ccb66c721d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1804.657767] env[63024]: DEBUG oslo_concurrency.lockutils [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "2dd20650-9273-432a-be28-73ccb66c721d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1804.657923] env[63024]: DEBUG oslo_concurrency.lockutils [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "2dd20650-9273-432a-be28-73ccb66c721d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1804.660520] env[63024]: INFO nova.compute.manager [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Terminating instance [ 1804.791740] env[63024]: DEBUG nova.compute.manager [None req-5ee27795-7022-4027-af90-35daf0d8eecf 
tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1804.809966] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1950999, 'name': Rename_Task, 'duration_secs': 1.33165} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.812759] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1804.813208] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2e7db08a-c40a-444e-8893-0ab7c6151ffc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.825425] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1804.825425] env[63024]: value = "task-1951001" [ 1804.825425] env[63024]: _type = "Task" [ 1804.825425] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.838943] env[63024]: DEBUG nova.compute.manager [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1804.842012] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951001, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.854737] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951000, 'name': CreateVM_Task, 'duration_secs': 0.319823} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1804.854853] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1804.855582] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1804.855732] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1804.856253] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1804.856521] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5095d440-b714-45e1-830a-54c4e72d047f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1804.863528] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1804.863528] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a4ba16-877c-da53-66d2-33b24dc8712a" [ 1804.863528] env[63024]: _type = "Task" [ 1804.863528] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1804.883237] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a4ba16-877c-da53-66d2-33b24dc8712a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1804.994924] env[63024]: DEBUG nova.network.neutron [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Successfully updated port: 611e1e79-ffb8-4ba9-8718-b57360eaa492 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1805.094732] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.094971] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1805.164846] env[63024]: DEBUG nova.compute.manager [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1805.165175] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1805.167343] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0fa9830-6ee5-43a5-98e9-7c5ebca733c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.177078] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1805.180502] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1abfbf52-21ac-4132-8428-b48073fb6e30 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.274280] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1805.274497] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-55114a38-c7d5-4672-b611-8820a7d7a086 
tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1805.274675] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleting the datastore file [datastore1] 2dd20650-9273-432a-be28-73ccb66c721d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1805.274942] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c95b2c6a-8b5b-4963-bea0-50d3c883d64b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.285618] env[63024]: DEBUG oslo_vmware.api [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1805.285618] env[63024]: value = "task-1951003" [ 1805.285618] env[63024]: _type = "Task" [ 1805.285618] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.298563] env[63024]: DEBUG oslo_vmware.api [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951003, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.335946] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951001, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.355062] env[63024]: DEBUG nova.network.neutron [req-6dd60a69-98a0-4c36-9e75-a3b875e0c61d req-9041a11a-98f8-4457-9579-c373bcaa76f8 service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Updated VIF entry in instance network info cache for port f4b23f8c-5413-42ca-abeb-eda669ea2fe5. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1805.355169] env[63024]: DEBUG nova.network.neutron [req-6dd60a69-98a0-4c36-9e75-a3b875e0c61d req-9041a11a-98f8-4457-9579-c373bcaa76f8 service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Updating instance_info_cache with network_info: [{"id": "f4b23f8c-5413-42ca-abeb-eda669ea2fe5", "address": "fa:16:3e:05:a4:75", "network": {"id": "1c8709f7-097f-4437-bae1-c45d9a4d02f1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1121041191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9981ec11228244fd8b75ee951a940c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4b23f8c-54", "ovs_interfaceid": "f4b23f8c-5413-42ca-abeb-eda669ea2fe5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.365962] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1805.376973] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a4ba16-877c-da53-66d2-33b24dc8712a, 'name': SearchDatastore_Task, 'duration_secs': 0.019482} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.379932] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.380191] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1805.380451] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.380581] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.380755] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1805.381838] env[63024]: DEBUG nova.network.neutron [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Successfully created port: d0439e29-9598-4648-991c-d2aff3b3fcf9 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1805.383864] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3e7bc7b-2ac9-4206-9ae6-25dc6d234b13 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.400468] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1805.400468] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1805.400764] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-032c5b96-d215-4d2c-a629-e814c219990b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.407456] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1805.407456] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52996ac4-9ef9-977d-ac1f-27754791bfef" [ 1805.407456] env[63024]: _type = "Task" [ 1805.407456] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.418977] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52996ac4-9ef9-977d-ac1f-27754791bfef, 'name': SearchDatastore_Task, 'duration_secs': 0.008428} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.419763] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bb28c007-7ef3-48d4-8116-c487ab364074 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.429019] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1805.429019] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b5f305-a574-77e4-a85a-1d1a5550bb82" [ 1805.429019] env[63024]: _type = "Task" [ 1805.429019] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.437171] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b5f305-a574-77e4-a85a-1d1a5550bb82, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1805.470622] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68212cf3-7379-4666-9c7a-189f4e5f5eb1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.485375] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66437c25-0bb2-4034-a10c-780394980485 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.494083] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1805.494240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1805.494516] env[63024]: DEBUG nova.network.neutron [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1805.530951] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b71895fa-8314-47d2-b314-a5613fb3c6d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.541197] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e58bb67-22bf-4ee1-aab9-c657ded74760 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.556138] env[63024]: DEBUG nova.compute.provider_tree [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1805.571675] env[63024]: DEBUG nova.network.neutron [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1805.727836] env[63024]: DEBUG nova.network.neutron [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance_info_cache with network_info: [{"id": "611e1e79-ffb8-4ba9-8718-b57360eaa492", "address": "fa:16:3e:f8:21:2c", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap611e1e79-ff", "ovs_interfaceid": "611e1e79-ffb8-4ba9-8718-b57360eaa492", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1805.799720] env[63024]: DEBUG oslo_vmware.api [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951003, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170171} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.799720] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1805.799720] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1805.799720] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1805.799720] env[63024]: INFO nova.compute.manager [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 1805.799720] env[63024]: DEBUG oslo.service.loopingcall [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1805.800280] env[63024]: DEBUG nova.compute.manager [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1805.802133] env[63024]: DEBUG nova.compute.manager [-] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1805.802220] env[63024]: DEBUG nova.network.neutron [-] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1805.836034] env[63024]: DEBUG oslo_vmware.api [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951001, 'name': PowerOnVM_Task, 'duration_secs': 0.603472} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.836034] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1805.836324] env[63024]: INFO nova.compute.manager [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Took 9.63 seconds to spawn the instance on the hypervisor. 
[ 1805.836361] env[63024]: DEBUG nova.compute.manager [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1805.837097] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8669a1-9658-4245-996d-143715d689e2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.849588] env[63024]: DEBUG nova.virt.hardware [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1805.849711] env[63024]: DEBUG nova.virt.hardware [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1805.849822] env[63024]: DEBUG nova.virt.hardware [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1805.850021] env[63024]: DEBUG nova.virt.hardware [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1805.851075] env[63024]: DEBUG nova.virt.hardware [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1805.851075] env[63024]: DEBUG nova.virt.hardware [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1805.851075] env[63024]: DEBUG nova.virt.hardware [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1805.851075] env[63024]: DEBUG nova.virt.hardware [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1805.851075] env[63024]: DEBUG nova.virt.hardware [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1805.851075] env[63024]: DEBUG nova.virt.hardware [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1805.851396] env[63024]: DEBUG nova.virt.hardware [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1805.851950] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1c8da7-2801-4b34-9c41-569d1c72ecc7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.861088] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c831071-f60e-43d8-99eb-66fcad8b21e1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.865897] env[63024]: DEBUG oslo_concurrency.lockutils [req-6dd60a69-98a0-4c36-9e75-a3b875e0c61d req-9041a11a-98f8-4457-9579-c373bcaa76f8 service nova] Releasing lock "refresh_cache-9267e5e4-732d-47f1-8a30-d926a1269fb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.937377] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b5f305-a574-77e4-a85a-1d1a5550bb82, 'name': SearchDatastore_Task, 'duration_secs': 0.00861} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1805.937637] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1805.937890] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9267e5e4-732d-47f1-8a30-d926a1269fb9/9267e5e4-732d-47f1-8a30-d926a1269fb9.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1805.938153] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8bcb8143-06cf-4ab2-ba49-c449abc31021 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1805.944159] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1805.944159] env[63024]: value = "task-1951004" [ 1805.944159] env[63024]: _type = "Task" [ 1805.944159] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1805.951925] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951004, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.059527] env[63024]: DEBUG nova.scheduler.client.report [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1806.220026] env[63024]: DEBUG nova.compute.manager [req-c1f25a15-c637-42cb-9708-dca6ed191c5f req-fba4765b-beb8-4fac-ae91-fe4aa48b18a3 service nova] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Received event network-vif-deleted-ca53b77e-33c4-4a60-b3aa-bc92763eb98e {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1806.220026] env[63024]: INFO nova.compute.manager [req-c1f25a15-c637-42cb-9708-dca6ed191c5f req-fba4765b-beb8-4fac-ae91-fe4aa48b18a3 service nova] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Neutron deleted interface ca53b77e-33c4-4a60-b3aa-bc92763eb98e; detaching it from the instance and deleting it from the info cache [ 1806.220026] env[63024]: DEBUG nova.network.neutron [req-c1f25a15-c637-42cb-9708-dca6ed191c5f req-fba4765b-beb8-4fac-ae91-fe4aa48b18a3 service nova] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.231077] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1806.231413] env[63024]: DEBUG nova.compute.manager [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Instance network_info: |[{"id": "611e1e79-ffb8-4ba9-8718-b57360eaa492", "address": "fa:16:3e:f8:21:2c", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap611e1e79-ff", "ovs_interfaceid": 
"611e1e79-ffb8-4ba9-8718-b57360eaa492", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1806.232304] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:21:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '611e1e79-ffb8-4ba9-8718-b57360eaa492', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1806.240134] env[63024]: DEBUG oslo.service.loopingcall [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1806.241407] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1806.242614] env[63024]: DEBUG nova.compute.manager [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Received event network-vif-plugged-611e1e79-ffb8-4ba9-8718-b57360eaa492 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1806.242856] env[63024]: DEBUG oslo_concurrency.lockutils [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] Acquiring lock "1709d916-d0c4-4706-b41b-8b0ed25f3331-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1806.243124] env[63024]: DEBUG oslo_concurrency.lockutils [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.243334] env[63024]: DEBUG oslo_concurrency.lockutils [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.243536] env[63024]: DEBUG nova.compute.manager [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] No waiting events found dispatching network-vif-plugged-611e1e79-ffb8-4ba9-8718-b57360eaa492 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1806.243732] env[63024]: WARNING nova.compute.manager 
[req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Received unexpected event network-vif-plugged-611e1e79-ffb8-4ba9-8718-b57360eaa492 for instance with vm_state building and task_state spawning. [ 1806.243924] env[63024]: DEBUG nova.compute.manager [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Received event network-changed-611e1e79-ffb8-4ba9-8718-b57360eaa492 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1806.244125] env[63024]: DEBUG nova.compute.manager [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Refreshing instance network info cache due to event network-changed-611e1e79-ffb8-4ba9-8718-b57360eaa492. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1806.244341] env[63024]: DEBUG oslo_concurrency.lockutils [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] Acquiring lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.244511] env[63024]: DEBUG oslo_concurrency.lockutils [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] Acquired lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.244691] env[63024]: DEBUG nova.network.neutron [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Refreshing network info cache for port 611e1e79-ffb8-4ba9-8718-b57360eaa492 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1806.245715] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-494ad4d9-05f9-4007-a077-e8f435f8c6d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.270106] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1806.270106] env[63024]: value = "task-1951005" [ 1806.270106] env[63024]: _type = "Task" [ 1806.270106] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.277651] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951005, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.357634] env[63024]: INFO nova.compute.manager [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Took 64.22 seconds to build instance. 
[ 1806.417349] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1806.417667] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1806.417875] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1806.455517] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951004, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.545548] env[63024]: DEBUG nova.network.neutron [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updated VIF entry in instance network info cache for port 611e1e79-ffb8-4ba9-8718-b57360eaa492. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1806.545935] env[63024]: DEBUG nova.network.neutron [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance_info_cache with network_info: [{"id": "611e1e79-ffb8-4ba9-8718-b57360eaa492", "address": "fa:16:3e:f8:21:2c", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap611e1e79-ff", "ovs_interfaceid": "611e1e79-ffb8-4ba9-8718-b57360eaa492", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.565051] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.779s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.565836] env[63024]: DEBUG nova.compute.manager [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1806.568898] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.427s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1806.569173] env[63024]: DEBUG nova.objects.instance [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Lazy-loading 'resources' on Instance uuid 5c2efe96-4ac4-4693-9203-43407d768f66 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1806.670402] env[63024]: DEBUG nova.network.neutron [-] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1806.723056] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1be9ee5c-fcf9-4ac6-944f-2d0860b840cf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.732232] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf642e5-5407-4d65-ad88-7f5f9abfbfda {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.771382] env[63024]: DEBUG nova.compute.manager [req-c1f25a15-c637-42cb-9708-dca6ed191c5f req-fba4765b-beb8-4fac-ae91-fe4aa48b18a3 service nova] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Detach interface failed, port_id=ca53b77e-33c4-4a60-b3aa-bc92763eb98e, reason: Instance 2dd20650-9273-432a-be28-73ccb66c721d could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1806.781314] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951005, 'name': CreateVM_Task, 'duration_secs': 0.474383} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.781637] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1806.782462] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.782769] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.783192] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1806.784027] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4f3df14-1284-4bb9-804c-3672d990d128 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.788285] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1806.788285] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c8dd65-f46f-5598-170c-165ee3783037" [ 1806.788285] env[63024]: _type = "Task" [ 1806.788285] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.796413] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c8dd65-f46f-5598-170c-165ee3783037, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1806.860034] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a35a26d7-3055-499f-8969-1e70a04918ff tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "6156ce17-3f29-487a-afc5-2fa0fb7f114c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.178s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1806.957940] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951004, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.791549} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1806.958355] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9267e5e4-732d-47f1-8a30-d926a1269fb9/9267e5e4-732d-47f1-8a30-d926a1269fb9.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1806.958679] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1806.959048] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bd5f86c-24b7-4430-b322-e50e244423d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1806.962592] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1806.962795] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquired lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1806.963036] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Forcefully refreshing network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1806.971860] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1806.971860] env[63024]: value = "task-1951006" [ 1806.971860] env[63024]: _type = "Task" [ 1806.971860] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1806.984736] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951006, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.048859] env[63024]: DEBUG oslo_concurrency.lockutils [req-57c7db4d-c32d-4f15-bc89-6c3893a2bb2d req-ab68eccf-50c1-4180-8070-dd865602f353 service nova] Releasing lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.073251] env[63024]: DEBUG nova.compute.utils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1807.078601] env[63024]: DEBUG nova.compute.manager [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1807.078779] env[63024]: DEBUG nova.network.neutron [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1807.144236] env[63024]: DEBUG nova.policy [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd857e115d7f54be58e8e8dbb20a900d5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a5d51c3beec44aecb65ba72dffb42d40', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1807.176392] env[63024]: INFO nova.compute.manager [-] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Took 1.37 seconds to deallocate network for instance. [ 1807.301795] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c8dd65-f46f-5598-170c-165ee3783037, 'name': SearchDatastore_Task, 'duration_secs': 0.02692} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.302206] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1807.302492] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1807.302901] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.302901] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.303100] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1807.307150] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0495b728-c3b5-4e95-a063-23e020d69476 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.322233] env[63024]: DEBUG nova.network.neutron [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Successfully updated port: d0439e29-9598-4648-991c-d2aff3b3fcf9 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1807.324080] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1807.325062] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1807.325835] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bff7d21-bef8-485b-89cc-cb780077b88b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.334308] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1807.334308] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528b6342-ea04-75f3-8ad6-f9063a10e15b" [ 1807.334308] env[63024]: _type = "Task" [ 1807.334308] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.345015] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528b6342-ea04-75f3-8ad6-f9063a10e15b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.362890] env[63024]: DEBUG nova.compute.manager [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1807.483920] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951006, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.154266} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.483920] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1807.484567] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2849cb0b-d123-4f08-a615-65955093128f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.512940] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 9267e5e4-732d-47f1-8a30-d926a1269fb9/9267e5e4-732d-47f1-8a30-d926a1269fb9.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1807.515866] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90fd3e4c-4d21-4d91-9d39-081e8cac7cc0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.537015] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1807.537015] env[63024]: value = "task-1951007" [ 1807.537015] env[63024]: _type = "Task" [ 1807.537015] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.545347] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951007, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.556181] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Acquiring lock "02db92ec-3377-406b-a95c-0022579fa75b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1807.556509] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Lock "02db92ec-3377-406b-a95c-0022579fa75b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.556656] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Acquiring lock "02db92ec-3377-406b-a95c-0022579fa75b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1807.556841] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Lock "02db92ec-3377-406b-a95c-0022579fa75b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1807.557064] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Lock "02db92ec-3377-406b-a95c-0022579fa75b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1807.561377] env[63024]: INFO nova.compute.manager [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Terminating instance [ 1807.583382] env[63024]: DEBUG nova.compute.manager [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1807.607749] env[63024]: DEBUG nova.network.neutron [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Successfully created port: 22f19c21-5ea3-4b2f-9b37-fa34262081b9 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1807.690751] env[63024]: DEBUG oslo_concurrency.lockutils [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1807.721840] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7140b93-4fb4-40a7-b5df-fe76bea1e1e1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.730369] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d04eae99-b64a-44ce-9b6d-bbad3344c7d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.769125] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98257ecc-15f0-496d-bae5-8da2352e4ff3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.779611] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a7e99a8-fc67-4b8c-88ee-89fc58dc96b2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.791954] env[63024]: DEBUG nova.compute.provider_tree [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1807.825390] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "refresh_cache-b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1807.825390] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "refresh_cache-b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1807.825390] env[63024]: DEBUG nova.network.neutron [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1807.843779] env[63024]: DEBUG oslo_vmware.api [None 
req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528b6342-ea04-75f3-8ad6-f9063a10e15b, 'name': SearchDatastore_Task, 'duration_secs': 0.032923} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1807.844620] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6be72cfc-b188-4855-8abb-0f923c9b5e8d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1807.849937] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1807.849937] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52915632-0f9d-b764-60e0-73ae0ebbaaf2" [ 1807.849937] env[63024]: _type = "Task" [ 1807.849937] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1807.858350] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52915632-0f9d-b764-60e0-73ae0ebbaaf2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1807.887602] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.047090] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951007, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.069719] env[63024]: DEBUG nova.compute.manager [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1808.069719] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1808.070550] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53264838-67c6-4073-acef-7f89cef01a0b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.080453] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1808.080858] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33b1afe0-bf15-45e7-9c01-6f4c9cde08a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.086561] env[63024]: DEBUG oslo_vmware.api [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Waiting for the task: (returnval){ [ 1808.086561] env[63024]: value = "task-1951008" [ 1808.086561] env[63024]: _type = "Task" [ 1808.086561] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.098325] env[63024]: DEBUG oslo_vmware.api [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1951008, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.156441] env[63024]: DEBUG oslo_concurrency.lockutils [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "6156ce17-3f29-487a-afc5-2fa0fb7f114c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.156698] env[63024]: DEBUG oslo_concurrency.lockutils [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "6156ce17-3f29-487a-afc5-2fa0fb7f114c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.157309] env[63024]: DEBUG oslo_concurrency.lockutils [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "6156ce17-3f29-487a-afc5-2fa0fb7f114c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.157519] env[63024]: DEBUG oslo_concurrency.lockutils [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "6156ce17-3f29-487a-afc5-2fa0fb7f114c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.157697] env[63024]: DEBUG oslo_concurrency.lockutils [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "6156ce17-3f29-487a-afc5-2fa0fb7f114c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.160036] env[63024]: INFO nova.compute.manager [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Terminating instance [ 1808.263917] env[63024]: DEBUG nova.compute.manager [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Received event network-vif-plugged-d0439e29-9598-4648-991c-d2aff3b3fcf9 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1808.264154] env[63024]: DEBUG oslo_concurrency.lockutils [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] Acquiring lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1808.264357] env[63024]: DEBUG oslo_concurrency.lockutils [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] Lock 
"b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.264520] env[63024]: DEBUG oslo_concurrency.lockutils [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] Lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.264680] env[63024]: DEBUG nova.compute.manager [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] No waiting events found dispatching network-vif-plugged-d0439e29-9598-4648-991c-d2aff3b3fcf9 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1808.264836] env[63024]: WARNING nova.compute.manager [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Received unexpected event network-vif-plugged-d0439e29-9598-4648-991c-d2aff3b3fcf9 for instance with vm_state building and task_state spawning. [ 1808.264990] env[63024]: DEBUG nova.compute.manager [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Received event network-changed-d0439e29-9598-4648-991c-d2aff3b3fcf9 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1808.265412] env[63024]: DEBUG nova.compute.manager [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Refreshing instance network info cache due to event network-changed-d0439e29-9598-4648-991c-d2aff3b3fcf9. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1808.265593] env[63024]: DEBUG oslo_concurrency.lockutils [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] Acquiring lock "refresh_cache-b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1808.295011] env[63024]: DEBUG nova.scheduler.client.report [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1808.360910] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52915632-0f9d-b764-60e0-73ae0ebbaaf2, 'name': SearchDatastore_Task, 'duration_secs': 0.026604} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.361619] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.361914] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 1709d916-d0c4-4706-b41b-8b0ed25f3331/1709d916-d0c4-4706-b41b-8b0ed25f3331.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1808.362198] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-42b7dd49-2861-4372-a781-13ef0fa97447 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.369353] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1808.369353] env[63024]: value = "task-1951009" [ 1808.369353] env[63024]: _type = "Task" [ 1808.369353] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.380530] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951009, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.389315] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance_info_cache with network_info: [{"id": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "address": "fa:16:3e:64:8e:6a", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90fdf5d2-f2", "ovs_interfaceid": "90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.410134] env[63024]: DEBUG nova.network.neutron [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1808.546853] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951007, 'name': ReconfigVM_Task, 'duration_secs': 0.53939} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.547180] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 9267e5e4-732d-47f1-8a30-d926a1269fb9/9267e5e4-732d-47f1-8a30-d926a1269fb9.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1808.547492] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=63024) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 1808.548119] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-ce115d51-8b88-4bf0-b970-2f8bc667cd60 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.555542] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1808.555542] env[63024]: value = "task-1951010" [ 1808.555542] env[63024]: _type = "Task" [ 1808.555542] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.563664] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951010, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.594535] env[63024]: DEBUG nova.compute.manager [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1808.603453] env[63024]: DEBUG oslo_vmware.api [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1951008, 'name': PowerOffVM_Task, 'duration_secs': 0.191683} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1808.603763] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1808.603935] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1808.604218] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12d1cf6c-be7a-4384-a7b6-42801c9f1b58 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.647297] env[63024]: DEBUG nova.virt.hardware [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1808.647610] env[63024]: DEBUG nova.virt.hardware [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1808.647712] env[63024]: DEBUG nova.virt.hardware [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1808.648156] env[63024]: DEBUG nova.virt.hardware [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1808.648156] env[63024]: DEBUG nova.virt.hardware [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1808.648156] env[63024]: DEBUG nova.virt.hardware [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 
tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1808.648511] env[63024]: DEBUG nova.virt.hardware [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1808.648583] env[63024]: DEBUG nova.virt.hardware [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1808.648748] env[63024]: DEBUG nova.virt.hardware [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1808.649024] env[63024]: DEBUG nova.virt.hardware [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1808.649103] env[63024]: DEBUG nova.virt.hardware [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1808.649929] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d316f13f-7cd1-4639-a9a7-6c5f86813102 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.653195] env[63024]: DEBUG nova.network.neutron [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Updating instance_info_cache with network_info: [{"id": "d0439e29-9598-4648-991c-d2aff3b3fcf9", "address": "fa:16:3e:f4:8a:4c", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapd0439e29-95", "ovs_interfaceid": "d0439e29-9598-4648-991c-d2aff3b3fcf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1808.660152] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a2944b-e7a7-4b79-adf2-8e31e81d49ca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.664299] env[63024]: DEBUG nova.compute.manager [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1808.664504] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1808.665655] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5110de-daf2-4413-9fc0-beda52317f8c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.682837] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1808.682837] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-33786773-6a9e-48eb-8cf1-7ef017960539 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.688183] env[63024]: DEBUG oslo_vmware.api [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1808.688183] env[63024]: value = "task-1951012" [ 1808.688183] env[63024]: _type = "Task" [ 1808.688183] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.699694] env[63024]: DEBUG oslo_vmware.api [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951012, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.746502] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1808.746925] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1808.746925] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Deleting the datastore file [datastore1] 02db92ec-3377-406b-a95c-0022579fa75b {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1808.747205] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bdf1c2f6-26ab-4056-a185-721d96283ad6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1808.754557] env[63024]: DEBUG oslo_vmware.api [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Waiting for the task: (returnval){ [ 1808.754557] env[63024]: value = "task-1951013" [ 1808.754557] env[63024]: _type = "Task" [ 1808.754557] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1808.764602] env[63024]: DEBUG oslo_vmware.api [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1951013, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.800665] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.232s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1808.804230] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.670s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1808.806617] env[63024]: INFO nova.compute.claims [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1808.877955] env[63024]: INFO nova.scheduler.client.report [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Deleted allocations for instance 5c2efe96-4ac4-4693-9203-43407d768f66 [ 1808.882343] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951009, 'name': CopyVirtualDisk_Task} progress is 25%. 
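The records just above trace oslo.concurrency lock handling around the resource tracker: one request releases the "compute_resources" lock after holding it 2.232s, and the next acquires it after waiting 29.670s before making its instance claim. Below is a minimal illustrative sketch of that pattern using the public lockutils.lock() context manager; the lock name matches the log, but the body and the timing code are stand-ins, not Nova's resource-tracker implementation.

import time

from oslo_concurrency import lockutils


def claim_with_lock():
    start = time.monotonic()
    # Callers serialize on the "compute_resources" lock; the "waited N s" in
    # the log is the time spent here while another holder (e.g. update_usage)
    # still had the lock.
    with lockutils.lock("compute_resources"):
        print("acquired after waiting %.3fs" % (time.monotonic() - start))
        # ... perform the instance claim while the lock is held ("held N s") ...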
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1808.891569] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Releasing lock "refresh_cache-f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1808.891841] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updated the network info_cache for instance {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10329}} [ 1808.892076] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1808.892483] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1808.892689] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1808.892878] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1808.893043] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1808.893194] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1808.893321] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1808.893462] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1809.067784] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951010, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.04939} completed successfully. 
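The repeated "Waiting for the task" / "progress is N%" / "completed successfully" records come from oslo.vmware's task handling (the same api.py wait_for_task/_poll_task lines the log cites). Below is a hedged sketch of the calling pattern, assuming only the public invoke_api()/wait_for_task() methods on VMwareAPISession; the vm_ref argument and the surrounding function are illustrative, not Nova's code.

from oslo_vmware import api


def power_off(session: api.VMwareAPISession, vm_ref):
    # Starts VirtualMachine.PowerOffVM_Task on vCenter and returns a task
    # reference such as "task-1951012" in the log above.
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    # Polls the task until it succeeds (or raises); each poll is one of the
    # "progress is N%" lines, and the last one logs "completed successfully".
    session.wait_for_task(task)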
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.067784] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=63024) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 1809.068562] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-880419fe-b234-4d1c-9e71-d480b3455ac3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.094714] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] 9267e5e4-732d-47f1-8a30-d926a1269fb9/ephemeral_0.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1809.095337] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-901c4104-abf3-4a06-9bce-440775e5e6f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.113469] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1809.113469] env[63024]: value = "task-1951014" [ 1809.113469] env[63024]: _type = "Task" [ 1809.113469] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.121722] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951014, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.156244] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "refresh_cache-b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1809.156244] env[63024]: DEBUG nova.compute.manager [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Instance network_info: |[{"id": "d0439e29-9598-4648-991c-d2aff3b3fcf9", "address": "fa:16:3e:f4:8a:4c", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0439e29-95", "ovs_interfaceid": "d0439e29-9598-4648-991c-d2aff3b3fcf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1809.156888] env[63024]: DEBUG oslo_concurrency.lockutils [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] Acquired lock "refresh_cache-b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.156888] env[63024]: DEBUG nova.network.neutron [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Refreshing network info cache for port d0439e29-9598-4648-991c-d2aff3b3fcf9 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1809.157824] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:8a:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0439e29-9598-4648-991c-d2aff3b3fcf9', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1809.166711] env[63024]: DEBUG oslo.service.loopingcall [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 
tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1809.169872] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1809.170403] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-146477af-50a9-433f-9632-8e2701bfc4cf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.193594] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1809.193594] env[63024]: value = "task-1951015" [ 1809.193594] env[63024]: _type = "Task" [ 1809.193594] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.199770] env[63024]: DEBUG oslo_vmware.api [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951012, 'name': PowerOffVM_Task, 'duration_secs': 0.445914} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.200437] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1809.200611] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1809.200876] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-47e33a56-e8b2-4c6f-9668-702e0ca32855 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.209193] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951015, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.209442] env[63024]: DEBUG nova.compute.manager [req-2fedbaa4-63cc-4dbd-9d18-abd10528b9de req-41580bd6-fb7e-4c94-a8ea-f00786be839a service nova] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Received event network-vif-plugged-22f19c21-5ea3-4b2f-9b37-fa34262081b9 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1809.209645] env[63024]: DEBUG oslo_concurrency.lockutils [req-2fedbaa4-63cc-4dbd-9d18-abd10528b9de req-41580bd6-fb7e-4c94-a8ea-f00786be839a service nova] Acquiring lock "81f96b5a-b878-4e6c-9683-00528a4d5650-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.209872] env[63024]: DEBUG oslo_concurrency.lockutils [req-2fedbaa4-63cc-4dbd-9d18-abd10528b9de req-41580bd6-fb7e-4c94-a8ea-f00786be839a service nova] Lock "81f96b5a-b878-4e6c-9683-00528a4d5650-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1809.210043] env[63024]: DEBUG oslo_concurrency.lockutils [req-2fedbaa4-63cc-4dbd-9d18-abd10528b9de req-41580bd6-fb7e-4c94-a8ea-f00786be839a service nova] Lock "81f96b5a-b878-4e6c-9683-00528a4d5650-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1809.210209] env[63024]: DEBUG nova.compute.manager [req-2fedbaa4-63cc-4dbd-9d18-abd10528b9de req-41580bd6-fb7e-4c94-a8ea-f00786be839a service nova] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] No waiting events found dispatching network-vif-plugged-22f19c21-5ea3-4b2f-9b37-fa34262081b9 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1809.210388] env[63024]: WARNING nova.compute.manager [req-2fedbaa4-63cc-4dbd-9d18-abd10528b9de req-41580bd6-fb7e-4c94-a8ea-f00786be839a service nova] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Received unexpected event network-vif-plugged-22f19c21-5ea3-4b2f-9b37-fa34262081b9 for instance with vm_state building and task_state spawning. [ 1809.260657] env[63024]: DEBUG nova.network.neutron [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Successfully updated port: 22f19c21-5ea3-4b2f-9b37-fa34262081b9 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1809.268647] env[63024]: DEBUG oslo_vmware.api [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Task: {'id': task-1951013, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.457643} completed successfully. 
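Taken together, the destroy-path records (tasks 1951012/1951013 and the vmops lines around them) show the teardown order: power the VM off, unregister it, delete its datastore directory, then deallocate its Neutron ports. A rough sketch of that ordering follows; delete_datastore_files() is a hypothetical helper standing in for the FileManager.DeleteDatastoreFile_Task call, and only the sequence itself is taken from the log.

def destroy_instance(session, vm_ref, ds_path, network_api, context, instance):
    # "Powering off the VM" -> PowerOffVM_Task, then wait for it to finish.
    session.wait_for_task(
        session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref))
    # "Unregistering the VM" / "Unregistered the VM".
    session.invoke_api(session.vim, "UnregisterVM", vm_ref)
    # "Deleting the datastore file [datastore1] <instance uuid>" --
    # hypothetical helper wrapping FileManager.DeleteDatastoreFile_Task.
    delete_datastore_files(session, ds_path)
    # "Deallocating network for instance" -> deallocate_for_instance().
    network_api.deallocate_for_instance(context, instance)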
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.269034] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1809.272231] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1809.272231] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1809.272231] env[63024]: INFO nova.compute.manager [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1809.272231] env[63024]: DEBUG oslo.service.loopingcall [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1809.272231] env[63024]: DEBUG nova.compute.manager [-] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1809.272231] env[63024]: DEBUG nova.network.neutron [-] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1809.287256] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1809.287521] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1809.287672] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleting the datastore file [datastore1] 6156ce17-3f29-487a-afc5-2fa0fb7f114c {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1809.290704] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1a263b0-69e9-4003-92a5-381b96065a4b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.298017] env[63024]: DEBUG oslo_vmware.api [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1809.298017] env[63024]: value = "task-1951017" [ 1809.298017] env[63024]: _type = "Task" [ 1809.298017] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.307392] env[63024]: DEBUG oslo_vmware.api [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951017, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.381725] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951009, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.659981} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.382074] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 1709d916-d0c4-4706-b41b-8b0ed25f3331/1709d916-d0c4-4706-b41b-8b0ed25f3331.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1809.382351] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1809.382638] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96e8ea5a-90e3-4113-9446-162587124087 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.390240] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1809.390240] env[63024]: value = "task-1951018" [ 1809.390240] env[63024]: _type = "Task" [ 1809.390240] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.390689] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4b7fbcda-fc38-488d-a168-e4a73c15d726 tempest-ImagesNegativeTestJSON-293137996 tempest-ImagesNegativeTestJSON-293137996-project-member] Lock "5c2efe96-4ac4-4693-9203-43407d768f66" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.764s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1809.396862] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1809.403129] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951018, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.588275] env[63024]: DEBUG nova.network.neutron [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Updated VIF entry in instance network info cache for port d0439e29-9598-4648-991c-d2aff3b3fcf9. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1809.588695] env[63024]: DEBUG nova.network.neutron [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Updating instance_info_cache with network_info: [{"id": "d0439e29-9598-4648-991c-d2aff3b3fcf9", "address": "fa:16:3e:f4:8a:4c", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0439e29-95", "ovs_interfaceid": "d0439e29-9598-4648-991c-d2aff3b3fcf9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1809.624741] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951014, 'name': ReconfigVM_Task, 'duration_secs': 0.3273} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.625072] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Reconfigured VM instance instance-00000038 to attach disk [datastore1] 9267e5e4-732d-47f1-8a30-d926a1269fb9/ephemeral_0.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1809.625685] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a3704774-9384-416b-8e09-d5de9f89d5ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.631769] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1809.631769] env[63024]: value = "task-1951019" [ 1809.631769] env[63024]: _type = "Task" [ 1809.631769] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.640719] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951019, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.706184] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951015, 'name': CreateVM_Task, 'duration_secs': 0.385615} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.706358] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1809.707040] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.707214] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.707525] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1809.707779] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25732556-02a7-45da-801f-43839e1a5cb8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.712523] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1809.712523] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520ab5d0-e50d-617c-8ea8-60c251cc1ddb" [ 1809.712523] env[63024]: _type = "Task" [ 1809.712523] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.720653] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520ab5d0-e50d-617c-8ea8-60c251cc1ddb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1809.764024] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "refresh_cache-81f96b5a-b878-4e6c-9683-00528a4d5650" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1809.764024] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquired lock "refresh_cache-81f96b5a-b878-4e6c-9683-00528a4d5650" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1809.764024] env[63024]: DEBUG nova.network.neutron [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1809.811583] env[63024]: DEBUG oslo_vmware.api [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951017, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211575} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.811871] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1809.812087] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1809.812195] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1809.812444] env[63024]: INFO nova.compute.manager [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1809.812599] env[63024]: DEBUG oslo.service.loopingcall [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1809.812844] env[63024]: DEBUG nova.compute.manager [-] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1809.812844] env[63024]: DEBUG nova.network.neutron [-] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1809.901531] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951018, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105089} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1809.907426] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1809.908716] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8718de5-42fd-4f69-ab65-fb14e1ca7542 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.938096] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 1709d916-d0c4-4706-b41b-8b0ed25f3331/1709d916-d0c4-4706-b41b-8b0ed25f3331.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1809.944216] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d271934-ea81-4f3f-84f9-208dd3fd5342 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1809.961872] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1809.961872] env[63024]: value = "task-1951020" [ 1809.961872] env[63024]: _type = "Task" [ 1809.961872] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1809.971666] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951020, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.095910] env[63024]: DEBUG oslo_concurrency.lockutils [req-a4447fb9-987d-4b67-94a6-054a7ef82cec req-bd8bcb1e-f5ad-4023-9b0d-27119482bc4f service nova] Releasing lock "refresh_cache-b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.105198] env[63024]: DEBUG nova.network.neutron [-] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.151181] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951019, 'name': Rename_Task, 'duration_secs': 0.335892} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.151462] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1810.151877] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8b60844-d8dd-4ad4-a1bb-593abac10524 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.161877] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1810.161877] env[63024]: value = "task-1951021" [ 1810.161877] env[63024]: _type = "Task" [ 1810.161877] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.173776] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951021, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.230054] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520ab5d0-e50d-617c-8ea8-60c251cc1ddb, 'name': SearchDatastore_Task, 'duration_secs': 0.009591} completed successfully. 
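The SearchDatastore_Task records above belong to the image-cache check: the spawn path takes a lock named after the cached image's datastore path and searches devstack-image-cache_base for the VMDK before deciding whether to copy it. A sketch of that serialize-then-check idea, assuming lockutils.lock(); file_exists() and copy_image() are hypothetical helpers, and the path below is a placeholder rather than the real image UUID.

from oslo_concurrency import lockutils

CACHE_VMDK = "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk"


def fetch_image_if_missing(session):
    # One request per image pays the copy cost; the others wait on the lock
    # and then find the cached VMDK via SearchDatastore_Task.
    with lockutils.lock(CACHE_VMDK):
        if not file_exists(session, CACHE_VMDK):   # hypothetical helper
            copy_image(session, CACHE_VMDK)        # hypothetical helper
    return CACHE_VMDK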
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.231698] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.231698] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1810.231698] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1810.231698] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1810.231967] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1810.232339] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-29b5b987-1702-4755-9bf7-d698d107789a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.241971] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1810.242258] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1810.242924] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-751062db-62e9-4f4b-849e-09a7439fb5ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.248393] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1810.248393] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52231a2f-64b2-247c-233e-5fd4c3905401" [ 1810.248393] env[63024]: _type = "Task" [ 1810.248393] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.258337] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52231a2f-64b2-247c-233e-5fd4c3905401, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.304225] env[63024]: DEBUG nova.network.neutron [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1810.415433] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a904d3-a37f-4ce3-bd32-af04fbd36b63 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.423977] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9dcbbaa-08c8-4734-8f67-83a1c0ec8039 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.456948] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ce478b-2ed9-4ba9-930f-c9badf5d880f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.464346] env[63024]: DEBUG nova.network.neutron [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Updating instance_info_cache with network_info: [{"id": "22f19c21-5ea3-4b2f-9b37-fa34262081b9", "address": "fa:16:3e:49:c9:2e", "network": {"id": "19563e44-3f43-4aee-8e65-abfb07528a6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-310580499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5d51c3beec44aecb65ba72dffb42d40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", 
"details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22f19c21-5e", "ovs_interfaceid": "22f19c21-5ea3-4b2f-9b37-fa34262081b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.474088] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9fe2d2f-56e7-4c65-ae8e-53618fcd97c9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.494008] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951020, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.494576] env[63024]: DEBUG nova.compute.provider_tree [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1810.610531] env[63024]: INFO nova.compute.manager [-] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Took 1.34 seconds to deallocate network for instance. [ 1810.674170] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951021, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.717026] env[63024]: DEBUG nova.network.neutron [-] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1810.759853] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52231a2f-64b2-247c-233e-5fd4c3905401, 'name': SearchDatastore_Task, 'duration_secs': 0.009683} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.761025] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a00c7c5e-883d-4bcb-bf8c-247c73a61bcc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.766439] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1810.766439] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52335b23-4f19-e951-b6fe-8745276f885a" [ 1810.766439] env[63024]: _type = "Task" [ 1810.766439] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.779997] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52335b23-4f19-e951-b6fe-8745276f885a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1810.973399] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951020, 'name': ReconfigVM_Task, 'duration_secs': 0.611799} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1810.973755] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 1709d916-d0c4-4706-b41b-8b0ed25f3331/1709d916-d0c4-4706-b41b-8b0ed25f3331.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1810.974416] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-33b05787-176e-4799-98ed-afcdd4c81cf1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1810.979630] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Releasing lock "refresh_cache-81f96b5a-b878-4e6c-9683-00528a4d5650" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1810.980199] env[63024]: DEBUG nova.compute.manager [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Instance network_info: |[{"id": "22f19c21-5ea3-4b2f-9b37-fa34262081b9", "address": "fa:16:3e:49:c9:2e", "network": {"id": "19563e44-3f43-4aee-8e65-abfb07528a6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-310580499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5d51c3beec44aecb65ba72dffb42d40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22f19c21-5e", "ovs_interfaceid": "22f19c21-5ea3-4b2f-9b37-fa34262081b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1810.982660] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:c9:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '03ac2c9c-6ad2-4a85-bfab-c7e336df859a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '22f19c21-5ea3-4b2f-9b37-fa34262081b9', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1810.989692] env[63024]: DEBUG oslo.service.loopingcall [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1810.990023] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1810.990023] env[63024]: value = "task-1951022" [ 1810.990023] env[63024]: _type = "Task" [ 1810.990023] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1810.990368] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1810.990679] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-31d8316f-5a3c-4714-b9d3-53dba721b727 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.012324] env[63024]: DEBUG nova.scheduler.client.report [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1811.021412] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951022, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.022830] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1811.022830] env[63024]: value = "task-1951023" [ 1811.022830] env[63024]: _type = "Task" [ 1811.022830] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.031197] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951023, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.115494] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.174450] env[63024]: DEBUG oslo_vmware.api [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951021, 'name': PowerOnVM_Task, 'duration_secs': 0.596684} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.174751] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1811.174954] env[63024]: INFO nova.compute.manager [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Took 10.81 seconds to spawn the instance on the hypervisor. [ 1811.175156] env[63024]: DEBUG nova.compute.manager [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1811.175890] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65757351-efc8-40d1-8bb0-860b655d8cb1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.216639] env[63024]: INFO nova.compute.manager [-] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Took 1.40 seconds to deallocate network for instance. [ 1811.277619] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52335b23-4f19-e951-b6fe-8745276f885a, 'name': SearchDatastore_Task, 'duration_secs': 0.010076} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.278215] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1811.278694] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4/b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1811.279236] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b6ce979-74e2-49f1-b039-3cf8d112e493 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.286921] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1811.286921] env[63024]: value = "task-1951024" [ 1811.286921] env[63024]: _type = "Task" [ 1811.286921] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.298149] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951024, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.332453] env[63024]: DEBUG nova.compute.manager [req-ef5520d3-ba5e-4410-be29-6faf094dc4db req-5094db8e-31b6-4562-83ec-ce5a1a11dab3 service nova] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Received event network-changed-22f19c21-5ea3-4b2f-9b37-fa34262081b9 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1811.332743] env[63024]: DEBUG nova.compute.manager [req-ef5520d3-ba5e-4410-be29-6faf094dc4db req-5094db8e-31b6-4562-83ec-ce5a1a11dab3 service nova] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Refreshing instance network info cache due to event network-changed-22f19c21-5ea3-4b2f-9b37-fa34262081b9. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1811.332976] env[63024]: DEBUG oslo_concurrency.lockutils [req-ef5520d3-ba5e-4410-be29-6faf094dc4db req-5094db8e-31b6-4562-83ec-ce5a1a11dab3 service nova] Acquiring lock "refresh_cache-81f96b5a-b878-4e6c-9683-00528a4d5650" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.333184] env[63024]: DEBUG oslo_concurrency.lockutils [req-ef5520d3-ba5e-4410-be29-6faf094dc4db req-5094db8e-31b6-4562-83ec-ce5a1a11dab3 service nova] Acquired lock "refresh_cache-81f96b5a-b878-4e6c-9683-00528a4d5650" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.333301] env[63024]: DEBUG nova.network.neutron [req-ef5520d3-ba5e-4410-be29-6faf094dc4db req-5094db8e-31b6-4562-83ec-ce5a1a11dab3 service nova] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Refreshing network info cache for port 22f19c21-5ea3-4b2f-9b37-fa34262081b9 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1811.502262] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951022, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.517387] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1811.517993] env[63024]: DEBUG nova.compute.manager [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1811.520947] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.358s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1811.522550] env[63024]: INFO nova.compute.claims [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1811.543031] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951023, 'name': CreateVM_Task, 'duration_secs': 0.361517} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.543031] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1811.543826] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1811.543937] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1811.544408] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1811.544718] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f88c3c80-08e0-4ef6-9456-f9186d928fd0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.553055] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1811.553055] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528f9607-bb0f-f4e6-d5cb-9e94f8acf610" [ 1811.553055] env[63024]: _type = "Task" [ 1811.553055] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.563204] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528f9607-bb0f-f4e6-d5cb-9e94f8acf610, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1811.695024] env[63024]: INFO nova.compute.manager [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Took 65.52 seconds to build instance. 
[ 1811.724830] env[63024]: DEBUG oslo_concurrency.lockutils [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1811.798772] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503329} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1811.799076] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4/b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1811.799724] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1811.799724] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aad178fd-090c-488d-bde2-1fddc6aeee2a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1811.806947] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1811.806947] env[63024]: value = "task-1951025" [ 1811.806947] env[63024]: _type = "Task" [ 1811.806947] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1811.813615] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951025, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.003365] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951022, 'name': Rename_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.027548] env[63024]: DEBUG nova.compute.utils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1812.030773] env[63024]: DEBUG nova.compute.manager [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1812.030964] env[63024]: DEBUG nova.network.neutron [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1812.064267] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528f9607-bb0f-f4e6-d5cb-9e94f8acf610, 'name': SearchDatastore_Task, 'duration_secs': 0.072035} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.064786] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.064786] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1812.064985] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1812.065175] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1812.065358] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 
tempest-VolumesAdminNegativeTest-1443872821-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1812.065639] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae7d0fb9-d02e-4f0b-bbc4-ca81b7e25380 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.074881] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1812.075088] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1812.075814] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6c78198-12ec-4e08-b690-4d26d3aa94d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.083374] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1812.083374] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ba36c7-613b-4744-4d31-2481e78a9e5a" [ 1812.083374] env[63024]: _type = "Task" [ 1812.083374] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.091396] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ba36c7-613b-4744-4d31-2481e78a9e5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.119462] env[63024]: DEBUG nova.policy [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c15b1302495a4cf781e5b9abae4f462f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a158ec715843423e8f013939e0071c71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1812.128500] env[63024]: DEBUG nova.network.neutron [req-ef5520d3-ba5e-4410-be29-6faf094dc4db req-5094db8e-31b6-4562-83ec-ce5a1a11dab3 service nova] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Updated VIF entry in instance network info cache for port 22f19c21-5ea3-4b2f-9b37-fa34262081b9. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1812.128936] env[63024]: DEBUG nova.network.neutron [req-ef5520d3-ba5e-4410-be29-6faf094dc4db req-5094db8e-31b6-4562-83ec-ce5a1a11dab3 service nova] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Updating instance_info_cache with network_info: [{"id": "22f19c21-5ea3-4b2f-9b37-fa34262081b9", "address": "fa:16:3e:49:c9:2e", "network": {"id": "19563e44-3f43-4aee-8e65-abfb07528a6a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-310580499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a5d51c3beec44aecb65ba72dffb42d40", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "03ac2c9c-6ad2-4a85-bfab-c7e336df859a", "external-id": "nsx-vlan-transportzone-379", "segmentation_id": 379, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap22f19c21-5e", "ovs_interfaceid": "22f19c21-5ea3-4b2f-9b37-fa34262081b9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1812.197343] env[63024]: DEBUG oslo_concurrency.lockutils [None req-75959b2a-3b39-40de-8e44-539ffe0e82c6 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "9267e5e4-732d-47f1-8a30-d926a1269fb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.809s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1812.318556] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951025, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064115} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.318676] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1812.319451] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a09a850-72f2-462c-b7c1-f8382e73a84a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.344271] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4/b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1812.344556] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db2183a6-043e-4d3e-b710-551eabb39d33 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.365215] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1812.365215] env[63024]: value = "task-1951026" [ 1812.365215] env[63024]: _type = "Task" [ 1812.365215] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.372454] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951026, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.506128] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951022, 'name': Rename_Task, 'duration_secs': 1.166446} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.506585] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1812.507709] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9d865276-88de-4178-bf9d-d10445e26d4a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.515198] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1812.515198] env[63024]: value = "task-1951027" [ 1812.515198] env[63024]: _type = "Task" [ 1812.515198] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.525612] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951027, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.533728] env[63024]: DEBUG nova.compute.manager [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1812.598031] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ba36c7-613b-4744-4d31-2481e78a9e5a, 'name': SearchDatastore_Task, 'duration_secs': 0.044418} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.604020] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67280fa7-3e41-46b9-8e34-021385156dc5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.615021] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1812.615021] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52accb26-5a0b-4466-cd17-27e10750dce4" [ 1812.615021] env[63024]: _type = "Task" [ 1812.615021] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.631144] env[63024]: DEBUG oslo_concurrency.lockutils [req-ef5520d3-ba5e-4410-be29-6faf094dc4db req-5094db8e-31b6-4562-83ec-ce5a1a11dab3 service nova] Releasing lock "refresh_cache-81f96b5a-b878-4e6c-9683-00528a4d5650" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1812.631593] env[63024]: DEBUG nova.compute.manager [req-ef5520d3-ba5e-4410-be29-6faf094dc4db req-5094db8e-31b6-4562-83ec-ce5a1a11dab3 service nova] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Received event network-vif-deleted-16b0c4a3-52c9-4522-a517-2dc2dc94eac5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1812.631907] env[63024]: DEBUG nova.compute.manager [req-ef5520d3-ba5e-4410-be29-6faf094dc4db req-5094db8e-31b6-4562-83ec-ce5a1a11dab3 service nova] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Received event network-vif-deleted-15f01f95-afb7-4613-899d-bce865e8ac82 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1812.699834] env[63024]: DEBUG nova.compute.manager [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1812.963733] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951026, 'name': ReconfigVM_Task, 'duration_secs': 0.296108} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1812.973030] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Reconfigured VM instance instance-0000003a to attach disk [datastore1] b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4/b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1812.973030] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52accb26-5a0b-4466-cd17-27e10750dce4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1812.973030] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd7f8957-cc1a-4e55-b412-0ff4bde16da0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1812.981469] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1812.981469] env[63024]: value = "task-1951028" [ 1812.981469] env[63024]: _type = "Task" [ 1812.981469] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1812.992324] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951028, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.029459] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951027, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.124117] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52accb26-5a0b-4466-cd17-27e10750dce4, 'name': SearchDatastore_Task, 'duration_secs': 0.010006} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.124387] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1813.124641] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 81f96b5a-b878-4e6c-9683-00528a4d5650/81f96b5a-b878-4e6c-9683-00528a4d5650.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1813.124894] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3c83d5b-e9df-4b45-9be9-67bcddd4a555 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.138688] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1813.138688] env[63024]: value = "task-1951029" [ 1813.138688] env[63024]: _type = "Task" [ 1813.138688] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.148093] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951029, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.197116] env[63024]: DEBUG nova.network.neutron [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Successfully created port: 4f25b42b-a210-4630-9dc5-b2e92c31b4f5 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1813.489428] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1813.493698] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951028, 'name': Rename_Task, 'duration_secs': 0.143197} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.494024] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1813.494404] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-750e5766-92d9-4079-b787-044fba363b06 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.501016] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f050e16-a8fc-4bb5-a255-510411f5ddb8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.504894] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1813.504894] env[63024]: value = "task-1951030" [ 1813.504894] env[63024]: _type = "Task" [ 1813.504894] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.515998] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951030, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1813.523324] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc39930e-c11c-49eb-bb0f-9a59835c7e38 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.533060] env[63024]: DEBUG oslo_vmware.api [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951027, 'name': PowerOnVM_Task, 'duration_secs': 0.810861} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.566293] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1813.566581] env[63024]: INFO nova.compute.manager [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Took 10.93 seconds to spawn the instance on the hypervisor. [ 1813.566766] env[63024]: DEBUG nova.compute.manager [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1813.567961] env[63024]: DEBUG nova.compute.manager [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1813.572198] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ab201b-6a6a-43c8-b95e-0e9f026ececa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.576022] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b840e5-083d-4325-95dd-9f016450e709 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.585090] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b084be6-3009-4015-9c8f-256c3cd8c72e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.606308] env[63024]: DEBUG nova.compute.provider_tree [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1813.618436] env[63024]: DEBUG nova.virt.hardware [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1813.618628] env[63024]: DEBUG nova.virt.hardware [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1813.618798] env[63024]: DEBUG nova.virt.hardware [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1813.619017] env[63024]: DEBUG nova.virt.hardware [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1813.619188] env[63024]: DEBUG nova.virt.hardware [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Image pref 0:0:0 {{(pid=63024) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1813.619332] env[63024]: DEBUG nova.virt.hardware [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1813.623248] env[63024]: DEBUG nova.virt.hardware [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1813.623248] env[63024]: DEBUG nova.virt.hardware [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1813.623248] env[63024]: DEBUG nova.virt.hardware [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1813.623248] env[63024]: DEBUG nova.virt.hardware [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1813.623248] env[63024]: DEBUG nova.virt.hardware [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1813.623248] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dc7f54-db41-43b7-848e-72710a781dbe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.630508] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ead815-d7c6-4e08-b7e0-5f6ca03ac549 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.656219] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951029, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.478326} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1813.656219] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 81f96b5a-b878-4e6c-9683-00528a4d5650/81f96b5a-b878-4e6c-9683-00528a4d5650.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1813.656219] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1813.656219] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d73ebc1b-54aa-497c-9dd8-7ec5d362576e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1813.662061] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1813.662061] env[63024]: value = "task-1951031" [ 1813.662061] env[63024]: _type = "Task" [ 1813.662061] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1813.670987] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951031, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.025025] env[63024]: DEBUG oslo_vmware.api [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951030, 'name': PowerOnVM_Task, 'duration_secs': 0.494993} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.025025] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1814.025025] env[63024]: INFO nova.compute.manager [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Took 8.22 seconds to spawn the instance on the hypervisor. 
[ 1814.025025] env[63024]: DEBUG nova.compute.manager [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1814.025025] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5126c9b-5435-492f-a2d5-99daae776248 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.102057] env[63024]: INFO nova.compute.manager [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Took 43.72 seconds to build instance. [ 1814.109920] env[63024]: DEBUG nova.scheduler.client.report [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1814.176767] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951031, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063543} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.177075] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1814.177847] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79aa4f2c-44e0-4cd4-8c7c-f9184fa5e4cf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.204402] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] 81f96b5a-b878-4e6c-9683-00528a4d5650/81f96b5a-b878-4e6c-9683-00528a4d5650.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1814.204613] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3046a4c-4450-4c9c-bdcf-590318898f54 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.224817] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1814.224817] env[63024]: value = "task-1951032" [ 1814.224817] env[63024]: _type = "Task" [ 1814.224817] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.232667] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951032, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1814.550243] env[63024]: INFO nova.compute.manager [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Took 40.76 seconds to build instance. 
[ 1814.609790] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c34f678d-937a-4aa4-b385-c9ecbd533cc6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 69.378s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.616653] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.096s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1814.617109] env[63024]: DEBUG nova.compute.manager [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1814.622356] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.004s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1814.625167] env[63024]: INFO nova.compute.claims [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1814.736265] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951032, 'name': ReconfigVM_Task, 'duration_secs': 0.268135} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1814.736487] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Reconfigured VM instance instance-0000003b to attach disk [datastore1] 81f96b5a-b878-4e6c-9683-00528a4d5650/81f96b5a-b878-4e6c-9683-00528a4d5650.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1814.737107] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65f2d129-575e-49dc-9c71-f067eddcad76 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1814.744358] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1814.744358] env[63024]: value = "task-1951033" [ 1814.744358] env[63024]: _type = "Task" [ 1814.744358] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1814.753629] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951033, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.052229] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5ee27795-7022-4027-af90-35daf0d8eecf tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.325s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1815.090584] env[63024]: DEBUG nova.compute.manager [req-d6a85917-0b6a-48ed-85d4-0365b86269f9 req-ae7f667e-af81-4cf8-bfb0-9c8b52a75f6a service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Received event network-changed-f4b23f8c-5413-42ca-abeb-eda669ea2fe5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1815.090783] env[63024]: DEBUG nova.compute.manager [req-d6a85917-0b6a-48ed-85d4-0365b86269f9 req-ae7f667e-af81-4cf8-bfb0-9c8b52a75f6a service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Refreshing instance network info cache due to event network-changed-f4b23f8c-5413-42ca-abeb-eda669ea2fe5. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1815.091088] env[63024]: DEBUG oslo_concurrency.lockutils [req-d6a85917-0b6a-48ed-85d4-0365b86269f9 req-ae7f667e-af81-4cf8-bfb0-9c8b52a75f6a service nova] Acquiring lock "refresh_cache-9267e5e4-732d-47f1-8a30-d926a1269fb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.091265] env[63024]: DEBUG oslo_concurrency.lockutils [req-d6a85917-0b6a-48ed-85d4-0365b86269f9 req-ae7f667e-af81-4cf8-bfb0-9c8b52a75f6a service nova] Acquired lock "refresh_cache-9267e5e4-732d-47f1-8a30-d926a1269fb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.091717] env[63024]: DEBUG nova.network.neutron [req-d6a85917-0b6a-48ed-85d4-0365b86269f9 req-ae7f667e-af81-4cf8-bfb0-9c8b52a75f6a service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Refreshing network info cache for port f4b23f8c-5413-42ca-abeb-eda669ea2fe5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1815.115711] env[63024]: DEBUG nova.compute.manager [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1815.132909] env[63024]: DEBUG nova.compute.utils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1815.136603] env[63024]: DEBUG nova.compute.manager [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1815.136603] env[63024]: DEBUG nova.network.neutron [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1815.188220] env[63024]: DEBUG nova.policy [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c15b1302495a4cf781e5b9abae4f462f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a158ec715843423e8f013939e0071c71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1815.254115] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951033, 'name': Rename_Task, 'duration_secs': 0.148343} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.254400] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1815.254641] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8bbc6d2-4cd3-4043-a6fe-e012477ea502 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.261290] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1815.261290] env[63024]: value = "task-1951034" [ 1815.261290] env[63024]: _type = "Task" [ 1815.261290] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1815.268854] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951034, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1815.352723] env[63024]: DEBUG nova.network.neutron [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Successfully updated port: 4f25b42b-a210-4630-9dc5-b2e92c31b4f5 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1815.470060] env[63024]: DEBUG nova.network.neutron [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Successfully created port: 23911ba0-0750-48de-9e80-03a0356b0496 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1815.554740] env[63024]: DEBUG nova.compute.manager [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1815.641056] env[63024]: DEBUG nova.compute.manager [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1815.658635] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1815.772089] env[63024]: DEBUG oslo_vmware.api [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951034, 'name': PowerOnVM_Task, 'duration_secs': 0.448562} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1815.772089] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1815.772089] env[63024]: INFO nova.compute.manager [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Took 7.18 seconds to spawn the instance on the hypervisor. 
[ 1815.772395] env[63024]: DEBUG nova.compute.manager [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1815.774038] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2043f1-5add-4eaa-b0df-117c2ccb87a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1815.855821] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "refresh_cache-ac60546a-37b2-4d2a-8505-61fe202e2ed0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1815.856031] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired lock "refresh_cache-ac60546a-37b2-4d2a-8505-61fe202e2ed0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1815.856142] env[63024]: DEBUG nova.network.neutron [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1815.983456] env[63024]: DEBUG nova.network.neutron [req-d6a85917-0b6a-48ed-85d4-0365b86269f9 req-ae7f667e-af81-4cf8-bfb0-9c8b52a75f6a service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Updated VIF entry in instance network info cache for port f4b23f8c-5413-42ca-abeb-eda669ea2fe5. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1815.983812] env[63024]: DEBUG nova.network.neutron [req-d6a85917-0b6a-48ed-85d4-0365b86269f9 req-ae7f667e-af81-4cf8-bfb0-9c8b52a75f6a service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Updating instance_info_cache with network_info: [{"id": "f4b23f8c-5413-42ca-abeb-eda669ea2fe5", "address": "fa:16:3e:05:a4:75", "network": {"id": "1c8709f7-097f-4437-bae1-c45d9a4d02f1", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1121041191-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.216", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9981ec11228244fd8b75ee951a940c85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0248a27a-1d7f-4195-987b-06bfc8467347", "external-id": "nsx-vlan-transportzone-26", "segmentation_id": 26, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf4b23f8c-54", "ovs_interfaceid": "f4b23f8c-5413-42ca-abeb-eda669ea2fe5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1816.079081] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.145651] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-435aa5c9-633a-4e5c-a86a-eb3ac81c0766 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.157619] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-026e242f-ae97-459b-9af4-cd5a5fe86eee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.188831] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00b7367b-ecfc-4126-a8ae-9aa26b9dcbf3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.197557] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dfd6dd9-44d6-4532-bc3b-93a89df6b9ee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.212997] env[63024]: DEBUG nova.compute.provider_tree [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1816.296341] env[63024]: INFO nova.compute.manager [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Took 41.13 seconds to build instance. [ 1816.421249] env[63024]: DEBUG nova.network.neutron [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1816.487492] env[63024]: DEBUG oslo_concurrency.lockutils [req-d6a85917-0b6a-48ed-85d4-0365b86269f9 req-ae7f667e-af81-4cf8-bfb0-9c8b52a75f6a service nova] Releasing lock "refresh_cache-9267e5e4-732d-47f1-8a30-d926a1269fb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1816.564669] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "0f371c69-c7ae-4649-b038-be82e8ca74e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.564669] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "0f371c69-c7ae-4649-b038-be82e8ca74e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.655766] env[63024]: DEBUG nova.compute.manager [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1816.693550] env[63024]: DEBUG nova.network.neutron [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Updating instance_info_cache with network_info: [{"id": "4f25b42b-a210-4630-9dc5-b2e92c31b4f5", "address": "fa:16:3e:40:14:8d", "network": {"id": "3ece47b4-8cf7-4658-bdd0-f49089890e34", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-339226542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a158ec715843423e8f013939e0071c71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f25b42b-a2", "ovs_interfaceid": "4f25b42b-a210-4630-9dc5-b2e92c31b4f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1816.707030] env[63024]: DEBUG nova.virt.hardware [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1816.707030] env[63024]: DEBUG nova.virt.hardware [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1816.707030] env[63024]: DEBUG nova.virt.hardware [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1816.707030] env[63024]: DEBUG nova.virt.hardware [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 
tempest-ListServerFiltersTestJSON-1366755677-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1816.707030] env[63024]: DEBUG nova.virt.hardware [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1816.707030] env[63024]: DEBUG nova.virt.hardware [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1816.707738] env[63024]: DEBUG nova.virt.hardware [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1816.708178] env[63024]: DEBUG nova.virt.hardware [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1816.708532] env[63024]: DEBUG nova.virt.hardware [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1816.708837] env[63024]: DEBUG nova.virt.hardware [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1816.709145] env[63024]: DEBUG nova.virt.hardware [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1816.710435] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f8615b2-fd38-4a5a-9dfc-6f8d4af98d2e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.715961] env[63024]: DEBUG nova.scheduler.client.report [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1816.724224] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3987776a-7d9b-48b9-99a2-60774e8840f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1816.799090] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b8afc1e8-6b6e-41fd-ba0d-632ce21d1f63 tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "81f96b5a-b878-4e6c-9683-00528a4d5650" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.345s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.985276] env[63024]: DEBUG nova.compute.manager [req-0a0c76ce-4dd5-4b2f-8105-23e6f908a704 req-95c131bf-ed70-480b-9ffd-443947d1f6de service nova] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Received event network-vif-plugged-23911ba0-0750-48de-9e80-03a0356b0496 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1816.985468] env[63024]: DEBUG oslo_concurrency.lockutils [req-0a0c76ce-4dd5-4b2f-8105-23e6f908a704 req-95c131bf-ed70-480b-9ffd-443947d1f6de service nova] Acquiring lock "7cf0ac90-d87d-4644-8a88-da5328d1721d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1816.985666] env[63024]: DEBUG oslo_concurrency.lockutils [req-0a0c76ce-4dd5-4b2f-8105-23e6f908a704 req-95c131bf-ed70-480b-9ffd-443947d1f6de service nova] Lock "7cf0ac90-d87d-4644-8a88-da5328d1721d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1816.985833] env[63024]: DEBUG oslo_concurrency.lockutils [req-0a0c76ce-4dd5-4b2f-8105-23e6f908a704 req-95c131bf-ed70-480b-9ffd-443947d1f6de service nova] Lock "7cf0ac90-d87d-4644-8a88-da5328d1721d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1816.985996] env[63024]: DEBUG nova.compute.manager [req-0a0c76ce-4dd5-4b2f-8105-23e6f908a704 req-95c131bf-ed70-480b-9ffd-443947d1f6de service nova] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] No waiting events found dispatching network-vif-plugged-23911ba0-0750-48de-9e80-03a0356b0496 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1816.986176] env[63024]: WARNING nova.compute.manager [req-0a0c76ce-4dd5-4b2f-8105-23e6f908a704 req-95c131bf-ed70-480b-9ffd-443947d1f6de service nova] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Received unexpected event network-vif-plugged-23911ba0-0750-48de-9e80-03a0356b0496 for instance with vm_state building and task_state spawning. 
[ 1817.098766] env[63024]: DEBUG nova.network.neutron [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Successfully updated port: 23911ba0-0750-48de-9e80-03a0356b0496 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1817.118874] env[63024]: DEBUG nova.compute.manager [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Received event network-vif-plugged-4f25b42b-a210-4630-9dc5-b2e92c31b4f5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1817.119109] env[63024]: DEBUG oslo_concurrency.lockutils [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] Acquiring lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.119318] env[63024]: DEBUG oslo_concurrency.lockutils [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] Lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.119482] env[63024]: DEBUG oslo_concurrency.lockutils [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] Lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.119649] env[63024]: DEBUG nova.compute.manager [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] No waiting events found dispatching network-vif-plugged-4f25b42b-a210-4630-9dc5-b2e92c31b4f5 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1817.119814] env[63024]: WARNING nova.compute.manager [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Received unexpected event network-vif-plugged-4f25b42b-a210-4630-9dc5-b2e92c31b4f5 for instance with vm_state building and task_state spawning. [ 1817.119968] env[63024]: DEBUG nova.compute.manager [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Received event network-changed-4f25b42b-a210-4630-9dc5-b2e92c31b4f5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1817.120136] env[63024]: DEBUG nova.compute.manager [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Refreshing instance network info cache due to event network-changed-4f25b42b-a210-4630-9dc5-b2e92c31b4f5. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1817.120305] env[63024]: DEBUG oslo_concurrency.lockutils [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] Acquiring lock "refresh_cache-ac60546a-37b2-4d2a-8505-61fe202e2ed0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.198414] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Releasing lock "refresh_cache-ac60546a-37b2-4d2a-8505-61fe202e2ed0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1817.198659] env[63024]: DEBUG nova.compute.manager [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Instance network_info: |[{"id": "4f25b42b-a210-4630-9dc5-b2e92c31b4f5", "address": "fa:16:3e:40:14:8d", "network": {"id": "3ece47b4-8cf7-4658-bdd0-f49089890e34", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-339226542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a158ec715843423e8f013939e0071c71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f25b42b-a2", "ovs_interfaceid": "4f25b42b-a210-4630-9dc5-b2e92c31b4f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1817.198986] env[63024]: DEBUG oslo_concurrency.lockutils [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] Acquired lock "refresh_cache-ac60546a-37b2-4d2a-8505-61fe202e2ed0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.199201] env[63024]: DEBUG nova.network.neutron [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Refreshing network info cache for port 4f25b42b-a210-4630-9dc5-b2e92c31b4f5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1817.200440] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:14:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'4f25b42b-a210-4630-9dc5-b2e92c31b4f5', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1817.208131] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Creating folder: Project (a158ec715843423e8f013939e0071c71). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1817.208590] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-92f57664-c3ff-42c7-9967-c05f262e384c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.220303] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Created folder: Project (a158ec715843423e8f013939e0071c71) in parent group-v401959. [ 1817.220491] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Creating folder: Instances. Parent ref: group-v402129. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1817.220990] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c5cccfac-cd87-4e4c-af3b-222a63737fe5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.224529] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.603s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1817.224991] env[63024]: DEBUG nova.compute.manager [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1817.227616] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 30.905s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.230242] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Created folder: Instances in parent group-v402129. [ 1817.230491] env[63024]: DEBUG oslo.service.loopingcall [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1817.231019] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1817.231100] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b074d4d7-c955-4057-a65a-29b457fe4be7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1817.250308] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1817.250308] env[63024]: value = "task-1951037" [ 1817.250308] env[63024]: _type = "Task" [ 1817.250308] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1817.258847] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951037, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.263885] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.264400] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1817.307042] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1817.317657] env[63024]: DEBUG nova.compute.manager [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Stashing vm_state: active {{(pid=63024) _prep_resize /opt/stack/nova/nova/compute/manager.py:5954}} [ 1817.601538] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "refresh_cache-7cf0ac90-d87d-4644-8a88-da5328d1721d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1817.601538] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired lock "refresh_cache-7cf0ac90-d87d-4644-8a88-da5328d1721d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1817.601732] env[63024]: DEBUG nova.network.neutron [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1817.730534] env[63024]: DEBUG nova.compute.utils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1817.732428] env[63024]: DEBUG nova.compute.manager [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1817.732585] env[63024]: DEBUG nova.network.neutron [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1817.735421] env[63024]: DEBUG nova.objects.instance [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lazy-loading 'migration_context' on Instance uuid 650a97b9-911e-44b0-9e82-a6d4cc95c9dd {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1817.762180] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951037, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1817.820739] env[63024]: DEBUG nova.policy [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e68362e6da947cc996661188e7629f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f87eadd82394447910efa7b71814e97', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1817.841393] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.842593] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1817.974459] env[63024]: DEBUG nova.network.neutron [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Updated VIF entry in instance network info cache for port 4f25b42b-a210-4630-9dc5-b2e92c31b4f5. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1817.974838] env[63024]: DEBUG nova.network.neutron [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Updating instance_info_cache with network_info: [{"id": "4f25b42b-a210-4630-9dc5-b2e92c31b4f5", "address": "fa:16:3e:40:14:8d", "network": {"id": "3ece47b4-8cf7-4658-bdd0-f49089890e34", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-339226542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a158ec715843423e8f013939e0071c71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f25b42b-a2", "ovs_interfaceid": "4f25b42b-a210-4630-9dc5-b2e92c31b4f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.137899] env[63024]: DEBUG nova.network.neutron [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1818.181028] env[63024]: DEBUG nova.network.neutron [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Successfully created port: 05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1818.237563] env[63024]: DEBUG nova.compute.manager [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1818.264779] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951037, 'name': CreateVM_Task, 'duration_secs': 0.51532} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.264962] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1818.268268] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.268972] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.268972] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1818.269232] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21501b75-e521-4428-9dae-bd3650de640c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.274290] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1818.274290] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5221b5a3-9767-9da6-95a0-d09712989a05" [ 1818.274290] env[63024]: _type = "Task" [ 1818.274290] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.286063] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5221b5a3-9767-9da6-95a0-d09712989a05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.298066] env[63024]: DEBUG nova.network.neutron [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Updating instance_info_cache with network_info: [{"id": "23911ba0-0750-48de-9e80-03a0356b0496", "address": "fa:16:3e:c3:33:80", "network": {"id": "3ece47b4-8cf7-4658-bdd0-f49089890e34", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-339226542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a158ec715843423e8f013939e0071c71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23911ba0-07", "ovs_interfaceid": "23911ba0-0750-48de-9e80-03a0356b0496", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1818.480379] env[63024]: DEBUG oslo_concurrency.lockutils [req-0344f93e-513e-4a8a-a544-991073fde3df req-1330f100-048c-4731-94c8-41481ae2389b service nova] Releasing lock "refresh_cache-ac60546a-37b2-4d2a-8505-61fe202e2ed0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.787258] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5221b5a3-9767-9da6-95a0-d09712989a05, 'name': SearchDatastore_Task, 'duration_secs': 0.013762} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1818.788402] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.788665] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1818.788956] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1818.789127] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1818.789314] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1818.790057] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904b7866-b783-4b2a-97f6-66e83aae9c3a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.792844] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53a0f934-75e6-4f11-a679-b589372d64a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.801067] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b89d3c4-8c8c-4ec3-9af9-1ca09873935a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.804338] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Releasing lock "refresh_cache-7cf0ac90-d87d-4644-8a88-da5328d1721d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1818.804638] env[63024]: DEBUG nova.compute.manager [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 
tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Instance network_info: |[{"id": "23911ba0-0750-48de-9e80-03a0356b0496", "address": "fa:16:3e:c3:33:80", "network": {"id": "3ece47b4-8cf7-4658-bdd0-f49089890e34", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-339226542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a158ec715843423e8f013939e0071c71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23911ba0-07", "ovs_interfaceid": "23911ba0-0750-48de-9e80-03a0356b0496", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1818.805917] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1818.805917] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1818.805917] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c3:33:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23911ba0-0750-48de-9e80-03a0356b0496', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1818.813034] env[63024]: DEBUG oslo.service.loopingcall [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1818.814275] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18dd21d9-5427-4623-8c4b-c1f7bf755eef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.842754] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1818.843701] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-456c8923-9f08-49aa-a81e-b952fb2aff70 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.859041] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e1cb68-e1ea-42c3-8f3a-9926fba68a29 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.863417] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1818.863417] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52923b0a-1e52-d15c-8b33-05131a2357ab" [ 1818.863417] env[63024]: _type = "Task" [ 1818.863417] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.871658] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1590792f-63dd-4b3f-a751-bd81e996b0d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1818.876044] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1818.876044] env[63024]: value = "task-1951038" [ 1818.876044] env[63024]: _type = "Task" [ 1818.876044] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1818.879470] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52923b0a-1e52-d15c-8b33-05131a2357ab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1818.891469] env[63024]: DEBUG nova.compute.provider_tree [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1818.899635] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951038, 'name': CreateVM_Task} progress is 15%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.009417] env[63024]: DEBUG nova.compute.manager [req-2a5ddce4-335e-4a89-a322-381167885b94 req-27046532-df6f-44f4-81ee-c81c0444f7ca service nova] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Received event network-changed-23911ba0-0750-48de-9e80-03a0356b0496 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1819.009621] env[63024]: DEBUG nova.compute.manager [req-2a5ddce4-335e-4a89-a322-381167885b94 req-27046532-df6f-44f4-81ee-c81c0444f7ca service nova] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Refreshing instance network info cache due to event network-changed-23911ba0-0750-48de-9e80-03a0356b0496. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1819.009825] env[63024]: DEBUG oslo_concurrency.lockutils [req-2a5ddce4-335e-4a89-a322-381167885b94 req-27046532-df6f-44f4-81ee-c81c0444f7ca service nova] Acquiring lock "refresh_cache-7cf0ac90-d87d-4644-8a88-da5328d1721d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.010077] env[63024]: DEBUG oslo_concurrency.lockutils [req-2a5ddce4-335e-4a89-a322-381167885b94 req-27046532-df6f-44f4-81ee-c81c0444f7ca service nova] Acquired lock "refresh_cache-7cf0ac90-d87d-4644-8a88-da5328d1721d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.010247] env[63024]: DEBUG nova.network.neutron [req-2a5ddce4-335e-4a89-a322-381167885b94 req-27046532-df6f-44f4-81ee-c81c0444f7ca service nova] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Refreshing network info cache for port 23911ba0-0750-48de-9e80-03a0356b0496 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1819.253100] env[63024]: DEBUG nova.compute.manager [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1819.281576] env[63024]: DEBUG nova.virt.hardware [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1819.281816] env[63024]: DEBUG nova.virt.hardware [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1819.281991] env[63024]: DEBUG nova.virt.hardware [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1819.282223] env[63024]: DEBUG nova.virt.hardware [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1819.282373] env[63024]: DEBUG nova.virt.hardware [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1819.282519] env[63024]: DEBUG nova.virt.hardware [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1819.282723] env[63024]: DEBUG nova.virt.hardware [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1819.282878] env[63024]: DEBUG nova.virt.hardware [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1819.283093] env[63024]: DEBUG 
nova.virt.hardware [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1819.283249] env[63024]: DEBUG nova.virt.hardware [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1819.284030] env[63024]: DEBUG nova.virt.hardware [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1819.284680] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b784ae64-ab4a-4058-a938-1886774c0cb9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.293185] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b232285-e32c-49b6-8f51-43e58f42853f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.375237] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52923b0a-1e52-d15c-8b33-05131a2357ab, 'name': SearchDatastore_Task, 'duration_secs': 0.014474} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.376166] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f771de8-7d36-49d5-a64d-e0841127792e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.385977] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1819.385977] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5297da64-1e54-734f-0a3e-b9177ab55a2f" [ 1819.385977] env[63024]: _type = "Task" [ 1819.385977] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.393017] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951038, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.394172] env[63024]: DEBUG nova.scheduler.client.report [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1819.400352] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5297da64-1e54-734f-0a3e-b9177ab55a2f, 'name': SearchDatastore_Task, 'duration_secs': 0.011883} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.400800] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1819.401072] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] ac60546a-37b2-4d2a-8505-61fe202e2ed0/ac60546a-37b2-4d2a-8505-61fe202e2ed0.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1819.401313] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b392de82-bde2-466b-b4d9-a787c386dab1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.407730] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1819.407730] env[63024]: value = "task-1951039" [ 1819.407730] env[63024]: _type = "Task" [ 1819.407730] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.416778] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951039, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.644826] env[63024]: DEBUG nova.compute.manager [req-4b662b6f-e99c-40b6-ba05-2b5b1e1170c4 req-f284a316-f11f-47d7-8780-6fe025413366 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Received event network-vif-plugged-05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1819.646713] env[63024]: DEBUG oslo_concurrency.lockutils [req-4b662b6f-e99c-40b6-ba05-2b5b1e1170c4 req-f284a316-f11f-47d7-8780-6fe025413366 service nova] Acquiring lock "92d1f96e-bbe7-4654-9d3a-47ba40057157-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1819.646713] env[63024]: DEBUG oslo_concurrency.lockutils [req-4b662b6f-e99c-40b6-ba05-2b5b1e1170c4 req-f284a316-f11f-47d7-8780-6fe025413366 service nova] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1819.646713] env[63024]: DEBUG oslo_concurrency.lockutils [req-4b662b6f-e99c-40b6-ba05-2b5b1e1170c4 req-f284a316-f11f-47d7-8780-6fe025413366 service nova] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1819.646713] env[63024]: DEBUG nova.compute.manager [req-4b662b6f-e99c-40b6-ba05-2b5b1e1170c4 req-f284a316-f11f-47d7-8780-6fe025413366 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] No waiting events found dispatching network-vif-plugged-05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1819.646713] env[63024]: WARNING nova.compute.manager [req-4b662b6f-e99c-40b6-ba05-2b5b1e1170c4 req-f284a316-f11f-47d7-8780-6fe025413366 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Received unexpected event network-vif-plugged-05bc00c8-444d-425a-8c1e-0d34f269c7e8 for instance with vm_state building and task_state spawning. [ 1819.733654] env[63024]: DEBUG nova.network.neutron [req-2a5ddce4-335e-4a89-a322-381167885b94 req-27046532-df6f-44f4-81ee-c81c0444f7ca service nova] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Updated VIF entry in instance network info cache for port 23911ba0-0750-48de-9e80-03a0356b0496. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1819.734025] env[63024]: DEBUG nova.network.neutron [req-2a5ddce4-335e-4a89-a322-381167885b94 req-27046532-df6f-44f4-81ee-c81c0444f7ca service nova] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Updating instance_info_cache with network_info: [{"id": "23911ba0-0750-48de-9e80-03a0356b0496", "address": "fa:16:3e:c3:33:80", "network": {"id": "3ece47b4-8cf7-4658-bdd0-f49089890e34", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-339226542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a158ec715843423e8f013939e0071c71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23911ba0-07", "ovs_interfaceid": "23911ba0-0750-48de-9e80-03a0356b0496", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1819.851131] env[63024]: DEBUG nova.network.neutron [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Successfully updated port: 05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1819.889673] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951038, 'name': CreateVM_Task, 'duration_secs': 0.61034} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1819.889844] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1819.890533] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1819.890702] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1819.891051] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1819.891303] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6e3e031-d8de-4cd9-8dc8-d868247a4a11 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1819.896219] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1819.896219] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5205d6d5-e2ee-d2c0-ea44-9c5fe08e3b72" [ 1819.896219] env[63024]: _type = "Task" [ 1819.896219] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1819.907186] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5205d6d5-e2ee-d2c0-ea44-9c5fe08e3b72, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1819.915517] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951039, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.098208] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "81f96b5a-b878-4e6c-9683-00528a4d5650" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.098584] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "81f96b5a-b878-4e6c-9683-00528a4d5650" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.098984] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "81f96b5a-b878-4e6c-9683-00528a4d5650-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1820.099236] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "81f96b5a-b878-4e6c-9683-00528a4d5650-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.099424] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "81f96b5a-b878-4e6c-9683-00528a4d5650-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.101793] env[63024]: INFO nova.compute.manager [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Terminating instance [ 1820.237096] env[63024]: DEBUG oslo_concurrency.lockutils [req-2a5ddce4-335e-4a89-a322-381167885b94 req-27046532-df6f-44f4-81ee-c81c0444f7ca service nova] Releasing lock "refresh_cache-7cf0ac90-d87d-4644-8a88-da5328d1721d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.353535] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1820.353812] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 
tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1820.353851] env[63024]: DEBUG nova.network.neutron [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1820.409630] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.182s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1820.415487] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5205d6d5-e2ee-d2c0-ea44-9c5fe08e3b72, 'name': SearchDatastore_Task, 'duration_secs': 0.033303} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.417532] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.423s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1820.418918] env[63024]: INFO nova.compute.claims [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1820.424644] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1820.424810] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1820.425516] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1820.425516] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1820.425516] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1820.431029] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f98b93f0-e6ea-40de-906a-7b7fdebcbb24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.438121] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951039, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.960147} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.439183] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] ac60546a-37b2-4d2a-8505-61fe202e2ed0/ac60546a-37b2-4d2a-8505-61fe202e2ed0.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1820.441034] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1820.441034] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1820.441034] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1820.441034] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-170b9c35-355e-4d10-a0be-4a7207d9b82d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.445471] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-342b2038-1ef5-4752-b034-0eabc2588674 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.448522] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1820.448522] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5284bbc2-c2e3-f3dc-b014-2fabe4330fe5" [ 1820.448522] env[63024]: _type = "Task" [ 1820.448522] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.453756] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1820.453756] env[63024]: value = "task-1951040" [ 1820.453756] env[63024]: _type = "Task" [ 1820.453756] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.461093] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5284bbc2-c2e3-f3dc-b014-2fabe4330fe5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.466388] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951040, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.607465] env[63024]: DEBUG nova.compute.manager [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1820.607707] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1820.608602] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7465b8a0-1741-4657-9894-3b2401070eae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.616174] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1820.616387] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-98e2aca6-1cb6-42a4-8dbb-759db74a1003 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.622257] env[63024]: DEBUG oslo_vmware.api [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1820.622257] env[63024]: value = "task-1951041" [ 1820.622257] env[63024]: _type = "Task" [ 1820.622257] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.629618] env[63024]: DEBUG oslo_vmware.api [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951041, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1820.922549] env[63024]: DEBUG nova.network.neutron [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1820.964462] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5284bbc2-c2e3-f3dc-b014-2fabe4330fe5, 'name': SearchDatastore_Task, 'duration_secs': 0.011058} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.967956] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951040, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.279099} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1820.968193] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2656f55c-7022-4e66-ab73-f1e81b4d7c47 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.973017] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1820.973017] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499a4040-2856-47c1-9da1-494a9c36d3c7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1820.976840] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1820.976840] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523c7d4f-6a03-d5da-b687-29199fc49c04" [ 1820.976840] env[63024]: _type = "Task" [ 1820.976840] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1820.997135] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] ac60546a-37b2-4d2a-8505-61fe202e2ed0/ac60546a-37b2-4d2a-8505-61fe202e2ed0.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1820.997938] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bbf98dd8-986d-4e81-938f-2a1c4e27a03a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.018722] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523c7d4f-6a03-d5da-b687-29199fc49c04, 'name': SearchDatastore_Task, 'duration_secs': 0.010229} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.019748] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.020076] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 7cf0ac90-d87d-4644-8a88-da5328d1721d/7cf0ac90-d87d-4644-8a88-da5328d1721d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1821.020351] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-721d4d21-8e34-4110-a1bf-aaf5a16e32cc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.026068] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1821.026068] env[63024]: value = "task-1951042" [ 1821.026068] env[63024]: _type = "Task" [ 1821.026068] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.030109] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1821.030109] env[63024]: value = "task-1951043" [ 1821.030109] env[63024]: _type = "Task" [ 1821.030109] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.036270] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951042, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.040621] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951043, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.132096] env[63024]: DEBUG oslo_vmware.api [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951041, 'name': PowerOffVM_Task, 'duration_secs': 0.168763} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.132452] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1821.132573] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1821.132823] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5703d41b-f7dd-4d0c-9fae-93332cea45b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.206168] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1821.206399] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1821.206625] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Deleting the datastore file [datastore1] 81f96b5a-b878-4e6c-9683-00528a4d5650 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1821.206931] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-24068669-f4ab-40d6-a5ba-9825c8c95a82 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.213713] env[63024]: DEBUG oslo_vmware.api [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1821.213713] env[63024]: value = "task-1951045" [ 1821.213713] env[63024]: _type = "Task" [ 1821.213713] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.224085] env[63024]: DEBUG oslo_vmware.api [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951045, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.226605] env[63024]: DEBUG nova.network.neutron [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updating instance_info_cache with network_info: [{"id": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "address": "fa:16:3e:d8:3a:2e", "network": {"id": "feb2323b-f3cf-42d6-a22b-81d1c94fce9d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-75667819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f87eadd82394447910efa7b71814e97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05bc00c8-44", "ovs_interfaceid": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1821.543550] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951042, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.550829] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951043, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462363} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.554230] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 7cf0ac90-d87d-4644-8a88-da5328d1721d/7cf0ac90-d87d-4644-8a88-da5328d1721d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1821.554230] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1821.554230] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3176a71a-8a66-487b-b1d7-da373b0bdfdc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.561428] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1821.561428] env[63024]: value = "task-1951046" [ 1821.561428] env[63024]: _type = "Task" [ 1821.561428] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.575235] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951046, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.727224] env[63024]: DEBUG oslo_vmware.api [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951045, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.466685} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1821.727775] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1821.727999] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1821.728261] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1821.728448] env[63024]: INFO nova.compute.manager [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1821.728689] env[63024]: DEBUG oslo.service.loopingcall [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1821.729227] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Releasing lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1821.729524] env[63024]: DEBUG nova.compute.manager [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Instance network_info: |[{"id": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "address": "fa:16:3e:d8:3a:2e", "network": {"id": "feb2323b-f3cf-42d6-a22b-81d1c94fce9d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-75667819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f87eadd82394447910efa7b71814e97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05bc00c8-44", "ovs_interfaceid": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1821.730035] env[63024]: DEBUG nova.compute.manager [-] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1821.730125] env[63024]: DEBUG nova.network.neutron [-] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1821.737653] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:3a:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05bc00c8-444d-425a-8c1e-0d34f269c7e8', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1821.743010] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Creating folder: Project (6f87eadd82394447910efa7b71814e97). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1821.744358] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58cfca6e-6443-468f-8ae3-f2c8886730f2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.754440] env[63024]: DEBUG nova.compute.manager [req-19024aa4-45ef-4fd3-b860-2d2040e5c768 req-db6debfb-1793-486a-b9d3-b02dc82b3029 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Received event network-changed-05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1821.754629] env[63024]: DEBUG nova.compute.manager [req-19024aa4-45ef-4fd3-b860-2d2040e5c768 req-db6debfb-1793-486a-b9d3-b02dc82b3029 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Refreshing instance network info cache due to event network-changed-05bc00c8-444d-425a-8c1e-0d34f269c7e8. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1821.754920] env[63024]: DEBUG oslo_concurrency.lockutils [req-19024aa4-45ef-4fd3-b860-2d2040e5c768 req-db6debfb-1793-486a-b9d3-b02dc82b3029 service nova] Acquiring lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1821.754976] env[63024]: DEBUG oslo_concurrency.lockutils [req-19024aa4-45ef-4fd3-b860-2d2040e5c768 req-db6debfb-1793-486a-b9d3-b02dc82b3029 service nova] Acquired lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1821.755532] env[63024]: DEBUG nova.network.neutron [req-19024aa4-45ef-4fd3-b860-2d2040e5c768 req-db6debfb-1793-486a-b9d3-b02dc82b3029 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Refreshing network info cache for port 05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1821.760732] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Created folder: Project (6f87eadd82394447910efa7b71814e97) in parent group-v401959. [ 1821.760874] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Creating folder: Instances. Parent ref: group-v402133. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1821.761688] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3316c71-6aa0-4f12-ad07-d215412af1da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.772227] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Created folder: Instances in parent group-v402133. [ 1821.772397] env[63024]: DEBUG oslo.service.loopingcall [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1821.772505] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1821.773124] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1df445bc-1507-403b-99ba-f37828b21ee8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1821.798218] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1821.798218] env[63024]: value = "task-1951049" [ 1821.798218] env[63024]: _type = "Task" [ 1821.798218] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1821.807126] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951049, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1821.964860] env[63024]: INFO nova.compute.manager [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Swapping old allocation on dict_keys(['89dfa68a-133e-436f-a9f1-86051f9fb96b']) held by migration cb60c03c-4816-43a9-a522-d9e511f6ee40 for instance [ 1821.988977] env[63024]: DEBUG nova.scheduler.client.report [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Overwriting current allocation {'allocations': {'89dfa68a-133e-436f-a9f1-86051f9fb96b': {'resources': {'VCPU': 1, 'MEMORY_MB': 256, 'DISK_GB': 1}, 'generation': 103}}, 'project_id': '8a5452991c0c433d987f52bad5c89d22', 'user_id': '868b92472ee24327a55c68efce691ba9', 'consumer_generation': 1} on consumer 650a97b9-911e-44b0-9e82-a6d4cc95c9dd {{(pid=63024) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 1822.048296] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951042, 'name': ReconfigVM_Task, 'duration_secs': 0.704471} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.048668] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Reconfigured VM instance instance-0000003c to attach disk [datastore1] ac60546a-37b2-4d2a-8505-61fe202e2ed0/ac60546a-37b2-4d2a-8505-61fe202e2ed0.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1822.049342] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70fc87ac-b3db-4af3-9951-146521279329 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.060298] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b665fa7-4131-464f-b15b-818e7221daf0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.063142] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1822.063142] env[63024]: value = "task-1951050" [ 1822.063142] env[63024]: _type = "Task" [ 1822.063142] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.073057] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e4cdc61-d259-4efb-82ac-7fb702b98d34 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.082271] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951050, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.082636] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951046, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.133528} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.083365] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1822.084185] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d9337f-8ae2-4c44-b04f-ed18e9a87f1a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.112296] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.112628] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquired lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.112694] env[63024]: DEBUG nova.network.neutron [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1822.114899] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b76440ee-9b02-493c-ac34-e22fcb5bc5d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.136707] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 7cf0ac90-d87d-4644-8a88-da5328d1721d/7cf0ac90-d87d-4644-8a88-da5328d1721d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1822.137864] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a252599-1820-4eab-9732-2982fe003cab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.158024] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ea3b7f-445a-4a03-a9cb-3fc8aaafb238 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.163438] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1822.163438] env[63024]: value = "task-1951051" [ 1822.163438] env[63024]: _type = "Task" [ 1822.163438] 
env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.176691] env[63024]: DEBUG nova.compute.provider_tree [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1822.184693] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.310389] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951049, 'name': CreateVM_Task, 'duration_secs': 0.358981} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.310557] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1822.311281] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.311451] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.311753] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1822.312045] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b906e3a-d122-4d0d-85a6-bbf020468e89 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.316693] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1822.316693] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52208c89-33e4-96a7-0b9f-17727af9d9de" [ 1822.316693] env[63024]: _type = "Task" [ 1822.316693] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.324737] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52208c89-33e4-96a7-0b9f-17727af9d9de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.527781] env[63024]: DEBUG nova.network.neutron [-] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.574033] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951050, 'name': Rename_Task, 'duration_secs': 0.144588} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.574340] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1822.574545] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5683bbbb-bca0-42c2-8502-2874f57d2174 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.581171] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1822.581171] env[63024]: value = "task-1951052" [ 1822.581171] env[63024]: _type = "Task" [ 1822.581171] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.591243] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951052, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.619275] env[63024]: DEBUG nova.network.neutron [req-19024aa4-45ef-4fd3-b860-2d2040e5c768 req-db6debfb-1793-486a-b9d3-b02dc82b3029 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updated VIF entry in instance network info cache for port 05bc00c8-444d-425a-8c1e-0d34f269c7e8. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1822.619603] env[63024]: DEBUG nova.network.neutron [req-19024aa4-45ef-4fd3-b860-2d2040e5c768 req-db6debfb-1793-486a-b9d3-b02dc82b3029 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updating instance_info_cache with network_info: [{"id": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "address": "fa:16:3e:d8:3a:2e", "network": {"id": "feb2323b-f3cf-42d6-a22b-81d1c94fce9d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-75667819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f87eadd82394447910efa7b71814e97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05bc00c8-44", "ovs_interfaceid": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1822.675075] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951051, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1822.679575] env[63024]: DEBUG nova.scheduler.client.report [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1822.827281] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52208c89-33e4-96a7-0b9f-17727af9d9de, 'name': SearchDatastore_Task, 'duration_secs': 0.009407} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1822.827612] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1822.827856] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1822.828110] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1822.828262] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1822.828445] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1822.828709] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87e90038-00f0-4ec0-b319-0021e7dfbd82 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.837575] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1822.837772] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1822.838481] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27a77981-e391-4a0c-bd26-42371758189f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1822.843812] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1822.843812] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cc6ea0-1312-befb-5d52-5064d16a3d3a" [ 1822.843812] env[63024]: _type = "Task" [ 1822.843812] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1822.851613] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cc6ea0-1312-befb-5d52-5064d16a3d3a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.032855] env[63024]: INFO nova.compute.manager [-] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Took 1.30 seconds to deallocate network for instance. [ 1823.092971] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951052, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.096576] env[63024]: DEBUG nova.network.neutron [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance_info_cache with network_info: [{"id": "241606ef-afe1-4ca8-912c-dae7639e4941", "address": "fa:16:3e:fc:f3:01", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.130", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap241606ef-af", "ovs_interfaceid": "241606ef-afe1-4ca8-912c-dae7639e4941", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1823.122381] env[63024]: DEBUG oslo_concurrency.lockutils [req-19024aa4-45ef-4fd3-b860-2d2040e5c768 req-db6debfb-1793-486a-b9d3-b02dc82b3029 
service nova] Releasing lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.174513] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951051, 'name': ReconfigVM_Task, 'duration_secs': 0.66745} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.174796] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 7cf0ac90-d87d-4644-8a88-da5328d1721d/7cf0ac90-d87d-4644-8a88-da5328d1721d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1823.175438] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4b4be292-86f9-4a7e-8ef4-881f0233439c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.182092] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1823.182092] env[63024]: value = "task-1951053" [ 1823.182092] env[63024]: _type = "Task" [ 1823.182092] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.185717] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.768s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1823.186209] env[63024]: DEBUG nova.compute.manager [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1823.188795] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 20.910s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1823.188984] env[63024]: DEBUG nova.objects.instance [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63024) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1823.198735] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951053, 'name': Rename_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.355274] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cc6ea0-1312-befb-5d52-5064d16a3d3a, 'name': SearchDatastore_Task, 'duration_secs': 0.008436} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.355992] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b61c3705-d02f-426b-b1e1-c34a9a7d137e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.362136] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1823.362136] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d808db-1724-0520-37e6-4088f9e8f2f5" [ 1823.362136] env[63024]: _type = "Task" [ 1823.362136] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.370083] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d808db-1724-0520-37e6-4088f9e8f2f5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.540169] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1823.591535] env[63024]: DEBUG oslo_vmware.api [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951052, 'name': PowerOnVM_Task, 'duration_secs': 0.707213} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.591808] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1823.592046] env[63024]: INFO nova.compute.manager [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Took 10.02 seconds to spawn the instance on the hypervisor. [ 1823.592238] env[63024]: DEBUG nova.compute.manager [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1823.592999] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abe1d75-1267-4436-ba7c-9b23e894c2bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.599161] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Releasing lock "refresh_cache-650a97b9-911e-44b0-9e82-a6d4cc95c9dd" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.599567] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1823.602391] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2fe4cba0-ca22-45b5-8af1-03ae88cb9f4c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.610908] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1823.610908] env[63024]: value = "task-1951054" [ 1823.610908] env[63024]: _type = "Task" [ 1823.610908] 
env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.618565] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951054, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.692826] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951053, 'name': Rename_Task, 'duration_secs': 0.150988} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.692826] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1823.693041] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3664e881-7f3a-47aa-881d-4bfcd30cca46 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.698481] env[63024]: DEBUG nova.compute.utils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1823.699904] env[63024]: DEBUG nova.compute.manager [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1823.700108] env[63024]: DEBUG nova.network.neutron [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1823.708924] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1823.708924] env[63024]: value = "task-1951055" [ 1823.708924] env[63024]: _type = "Task" [ 1823.708924] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.720240] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951055, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1823.742016] env[63024]: DEBUG nova.policy [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c15b1302495a4cf781e5b9abae4f462f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a158ec715843423e8f013939e0071c71', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1823.788068] env[63024]: DEBUG nova.compute.manager [req-cbd9043c-d962-4e25-b5e3-8a401f652dc7 req-84bf0fd7-ffc4-4ede-bd08-44c7545cd9de service nova] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Received event network-vif-deleted-22f19c21-5ea3-4b2f-9b37-fa34262081b9 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1823.872873] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d808db-1724-0520-37e6-4088f9e8f2f5, 'name': SearchDatastore_Task, 'duration_secs': 0.009868} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1823.873174] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1823.873471] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 92d1f96e-bbe7-4654-9d3a-47ba40057157/92d1f96e-bbe7-4654-9d3a-47ba40057157.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1823.873737] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b5c6a25-fea5-4c0c-a454-85081ca4cce4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1823.880739] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1823.880739] env[63024]: value = "task-1951056" [ 1823.880739] env[63024]: _type = "Task" [ 1823.880739] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1823.888841] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951056, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.005934] env[63024]: DEBUG nova.network.neutron [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Successfully created port: c38c5b06-5451-45cd-9156-009ba98e5596 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1824.115175] env[63024]: INFO nova.compute.manager [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Took 45.00 seconds to build instance. [ 1824.123350] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951054, 'name': PowerOffVM_Task, 'duration_secs': 0.314654} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.124027] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1824.124374] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T11:08:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='270e2a66-632e-41fa-bb7b-06506c9e6093',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-717992172',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1824.124606] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1824.124823] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1824.125051] env[63024]: DEBUG nova.virt.hardware [None 
req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1824.125212] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1824.125597] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1824.125647] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1824.125790] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1824.125960] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1824.126153] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1824.126335] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1824.134157] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ede2f495-fd2c-49ad-81e7-93581cab3ab7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.180691] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1824.180691] env[63024]: value = "task-1951057" [ 1824.180691] env[63024]: _type = "Task" [ 1824.180691] env[63024]: } to complete. 
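The hardware.py lines above show how the topology search collapses for a 1-vCPU flavor with no explicit limits: the effective maximum is 65536 sockets/cores/threads, so the only (sockets, cores, threads) triple whose product equals the vCPU count is 1:1:1. A simplified sketch of that enumeration, illustrative only and not nova.virt.hardware itself:

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Yield (sockets, cores, threads) triples whose product equals vcpus."""
    for s, c, t in product(range(1, vcpus + 1), repeat=3):
        if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
            yield (s, c, t)

if __name__ == "__main__":
    print(list(possible_topologies(1)))  # [(1, 1, 1)] for a 1-vCPU flavor, as in the log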
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.192082] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951057, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.201483] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2b21d87-b4fb-43f5-9913-1a103428d598 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.202664] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.837s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1824.204200] env[63024]: INFO nova.compute.claims [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1824.210022] env[63024]: DEBUG nova.compute.manager [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1824.222086] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951055, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.394031] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951056, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503891} completed successfully. 
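The lockutils lines above ("acquired ... waited 18.837s", "released ... held 1.012s") follow a fixed bookkeeping pattern: the time spent waiting is measured up to acquisition, and the hold time from acquisition to release. A standalone context-manager sketch of that pattern, not oslo.concurrency.lockutils itself:

import threading
import time
from contextlib import contextmanager

@contextmanager
def timed_lock(lock, name, by):
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    print(f'Lock "{name}" acquired by "{by}" :: waited {waited:.3f}s')
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        print(f'Lock "{name}" "released" by "{by}" :: held {held:.3f}s')

if __name__ == "__main__":
    lk = threading.Lock()
    with timed_lock(lk, "compute_resources", "ResourceTracker.instance_claim"):
        time.sleep(0.01)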
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.394031] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 92d1f96e-bbe7-4654-9d3a-47ba40057157/92d1f96e-bbe7-4654-9d3a-47ba40057157.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1824.394031] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1824.394031] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-619e3841-f151-4376-8198-f8e91acd1777 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.401185] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1824.401185] env[63024]: value = "task-1951058" [ 1824.401185] env[63024]: _type = "Task" [ 1824.401185] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.407778] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951058, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.617455] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f3108b7-5d82-4b0f-926b-7879fd431ae4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.044s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1824.691675] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951057, 'name': ReconfigVM_Task, 'duration_secs': 0.320776} completed successfully. 
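The "Extending root virtual disk to 1048576" value above is the flavor's 1 GB root disk expressed in KB, the unit the extend-disk task takes for its new capacity (1 GiB = 1024 * 1024 KB). A one-line conversion sketch; the function name is hypothetical:

def root_gb_to_kb(root_gb: int) -> int:
    return root_gb * 1024 * 1024

if __name__ == "__main__":
    print(root_gb_to_kb(1))  # 1048576, matching the ExtendVirtualDisk_Task request above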
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.692609] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa116951-9cd4-43ba-a678-90936a2f8c8e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.718683] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T11:08:34Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='270e2a66-632e-41fa-bb7b-06506c9e6093',id=37,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-717992172',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1824.718683] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1824.718830] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1824.719151] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1824.719151] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1824.719258] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1824.719468] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1824.719629] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Build topologies for 1 vcpu(s) 
1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1824.719793] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1824.720102] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1824.720165] env[63024]: DEBUG nova.virt.hardware [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1824.726930] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ec14aba-ac93-409e-8bb3-b17f5a5ecaff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.733718] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1824.733718] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5264d961-7ec4-f06f-7c33-64f9cc31eb04" [ 1824.733718] env[63024]: _type = "Task" [ 1824.733718] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.737076] env[63024]: DEBUG oslo_vmware.api [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951055, 'name': PowerOnVM_Task, 'duration_secs': 0.793892} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.741462] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1824.741677] env[63024]: INFO nova.compute.manager [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Took 8.09 seconds to spawn the instance on the hypervisor. 
[ 1824.741858] env[63024]: DEBUG nova.compute.manager [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1824.743122] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7395be76-ec34-4532-a3d6-1da41e75948c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.751731] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5264d961-7ec4-f06f-7c33-64f9cc31eb04, 'name': SearchDatastore_Task, 'duration_secs': 0.006195} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.758467] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Reconfiguring VM instance instance-00000027 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1824.762586] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c09cebb-a95d-4eea-9b95-9f41a9f1877e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.784839] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1824.784839] env[63024]: value = "task-1951059" [ 1824.784839] env[63024]: _type = "Task" [ 1824.784839] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.793906] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951059, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1824.911260] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951058, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067045} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1824.911575] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1824.912488] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4d4e61-2cb3-4238-8e55-b1f289309866 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.934649] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 92d1f96e-bbe7-4654-9d3a-47ba40057157/92d1f96e-bbe7-4654-9d3a-47ba40057157.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1824.934898] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb82607b-10c8-4cc1-ae2a-57d13f6729d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1824.953054] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1824.953054] env[63024]: value = "task-1951060" [ 1824.953054] env[63024]: _type = "Task" [ 1824.953054] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1824.960995] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951060, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.120309] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1825.231225] env[63024]: DEBUG nova.compute.manager [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1825.262567] env[63024]: DEBUG nova.virt.hardware [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1825.262567] env[63024]: DEBUG nova.virt.hardware [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1825.262567] env[63024]: DEBUG nova.virt.hardware [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1825.262567] env[63024]: DEBUG nova.virt.hardware [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1825.262796] env[63024]: DEBUG nova.virt.hardware [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1825.262938] env[63024]: DEBUG nova.virt.hardware [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1825.263172] env[63024]: DEBUG nova.virt.hardware [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1825.263330] env[63024]: DEBUG nova.virt.hardware [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
[ 1825.263492] env[63024]: DEBUG nova.virt.hardware [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1825.263650] env[63024]: DEBUG nova.virt.hardware [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1825.263838] env[63024]: DEBUG nova.virt.hardware [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1825.264690] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61212b2f-9abf-4242-8ef3-cb62211d3903 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.274807] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b601b623-8cb8-4e5e-b971-c71c96866bc1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.290815] env[63024]: INFO nova.compute.manager [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Took 45.15 seconds to build instance. [ 1825.306673] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951059, 'name': ReconfigVM_Task, 'duration_secs': 0.170003} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.307462] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Reconfigured VM instance instance-00000027 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1825.308235] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-387c8e6c-303a-4ca8-9cf9-53844619bcf5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.331737] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 650a97b9-911e-44b0-9e82-a6d4cc95c9dd/650a97b9-911e-44b0-9e82-a6d4cc95c9dd.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1825.331887] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-83304a1f-8daf-413c-b28b-af7ad6d8461d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.353262] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1825.353262] env[63024]: value = "task-1951061" [ 1825.353262] env[63024]: _type = "Task" [ 1825.353262] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.365497] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951061, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.464518] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951060, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.644430] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.750255] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80086bc2-685f-4e66-815c-039c136ec92b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.758240] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51c494f3-9d75-4700-afc5-4f3cd8c9a2de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.791829] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97566610-3f50-41cd-8210-44970389eed6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.799535] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af0a591-2e39-4596-92ae-7b670bd57e34 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.803853] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f81f397-0872-44cd-9b89-91c316de784e tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "7cf0ac90-d87d-4644-8a88-da5328d1721d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 72.128s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.818190] env[63024]: DEBUG nova.compute.provider_tree [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1825.863086] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951061, 'name': ReconfigVM_Task, 'duration_secs': 0.2804} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.863408] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 650a97b9-911e-44b0-9e82-a6d4cc95c9dd/650a97b9-911e-44b0-9e82-a6d4cc95c9dd.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1825.864226] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccbb53e-1d98-4815-ba8d-4eac4bab4a34 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.885024] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f38f388-ac32-4945-870b-7a6bb0e2303a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.903180] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9660e968-0261-45b7-b704-a5a6244e08e9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.922538] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e435b5-2e92-415a-9fc0-69c9fa5d475f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.933432] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1825.933884] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4da2f618-4917-4a56-88d0-cb961c7e0753 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.943018] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1825.943018] env[63024]: value = "task-1951062" [ 1825.943018] env[63024]: _type = "Task" [ 1825.943018] env[63024]: } to complete. 
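Taken together, the MigrationsAdminTest lines above walk through the disk shuffle for the resize: power the VM off, ReconfigVM_Task to detach the current root disk (disk 2000), ReconfigVM_Task to attach the thin-provisioned copy, then PowerOnVM_Task. A plain ordering sketch with hypothetical step callables standing in for those vCenter tasks:

def resize_disk_swap(power_off, detach_disk, attach_disk, power_on):
    """Run the steps in the order the log shows; each callable blocks until
    its vCenter task completes."""
    power_off()
    detach_disk()   # ReconfigVM_Task: detach disk 2000
    attach_disk()   # ReconfigVM_Task: attach the <uuid>.vmdk copy with type thin
    power_on()      # PowerOnVM_Task

if __name__ == "__main__":
    order = []
    resize_disk_swap(lambda: order.append("power_off"),
                     lambda: order.append("detach"),
                     lambda: order.append("attach"),
                     lambda: order.append("power_on"))
    print(order)  # ['power_off', 'detach', 'attach', 'power_on']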
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.946399] env[63024]: DEBUG nova.compute.manager [req-78ff64df-34e2-43c5-843e-ac0508731264 req-7a7d00d1-943b-49f9-8028-216ad8e6230d service nova] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Received event network-vif-plugged-c38c5b06-5451-45cd-9156-009ba98e5596 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1825.946655] env[63024]: DEBUG oslo_concurrency.lockutils [req-78ff64df-34e2-43c5-843e-ac0508731264 req-7a7d00d1-943b-49f9-8028-216ad8e6230d service nova] Acquiring lock "c12774e4-77d1-4001-8d5d-0240dfed4ead-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1825.946889] env[63024]: DEBUG oslo_concurrency.lockutils [req-78ff64df-34e2-43c5-843e-ac0508731264 req-7a7d00d1-943b-49f9-8028-216ad8e6230d service nova] Lock "c12774e4-77d1-4001-8d5d-0240dfed4ead-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1825.947130] env[63024]: DEBUG oslo_concurrency.lockutils [req-78ff64df-34e2-43c5-843e-ac0508731264 req-7a7d00d1-943b-49f9-8028-216ad8e6230d service nova] Lock "c12774e4-77d1-4001-8d5d-0240dfed4ead-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1825.947327] env[63024]: DEBUG nova.compute.manager [req-78ff64df-34e2-43c5-843e-ac0508731264 req-7a7d00d1-943b-49f9-8028-216ad8e6230d service nova] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] No waiting events found dispatching network-vif-plugged-c38c5b06-5451-45cd-9156-009ba98e5596 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1825.947539] env[63024]: WARNING nova.compute.manager [req-78ff64df-34e2-43c5-843e-ac0508731264 req-7a7d00d1-943b-49f9-8028-216ad8e6230d service nova] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Received unexpected event network-vif-plugged-c38c5b06-5451-45cd-9156-009ba98e5596 for instance with vm_state building and task_state spawning. [ 1825.953027] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951062, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.965305] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951060, 'name': ReconfigVM_Task, 'duration_secs': 0.537247} completed successfully. 
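The service-nova request above shows the external-event path: the handler takes the per-instance "-events" lock, tries to pop a waiter for network-vif-plugged-c38c5b06..., finds none, and logs the WARNING about an unexpected event. A standalone sketch of that pop-or-warn pattern using threading primitives; illustrative only, not nova.compute.manager.InstanceEvents:

import threading

class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        with self._lock:  # the "<uuid>-events" lock in the log
            return self._waiters.pop((instance_uuid, event_name), None)

def external_instance_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        print(f"WARNING: Received unexpected event {event_name} for {instance_uuid}")
    else:
        waiter.set()

if __name__ == "__main__":
    ev = InstanceEvents()
    external_instance_event(ev, "c12774e4", "network-vif-plugged-c38c5b06")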
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1825.965584] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 92d1f96e-bbe7-4654-9d3a-47ba40057157/92d1f96e-bbe7-4654-9d3a-47ba40057157.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1825.966245] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3a17098c-71b9-4885-8203-779176428acc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1825.972832] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1825.972832] env[63024]: value = "task-1951063" [ 1825.972832] env[63024]: _type = "Task" [ 1825.972832] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1825.981505] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951063, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1825.992558] env[63024]: DEBUG nova.network.neutron [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Successfully updated port: c38c5b06-5451-45cd-9156-009ba98e5596 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1826.308059] env[63024]: DEBUG nova.compute.manager [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1826.321873] env[63024]: DEBUG nova.scheduler.client.report [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1826.450166] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951062, 'name': PowerOnVM_Task} progress is 66%. 
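The inventory record logged above maps to schedulable capacity via the usual placement convention, capacity = (total - reserved) * allocation_ratio: 48 VCPU at ratio 4.0 gives 192, 196590 - 512 MB at ratio 1.0 gives 196078, and 400 GB of disk stays 400. A small sketch of that arithmetic, standalone rather than code taken from nova or placement:

inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}

def capacity(record):
    return int((record['total'] - record['reserved']) * record['allocation_ratio'])

if __name__ == "__main__":
    for rc, rec in inventory.items():
        print(rc, capacity(rec))
    # VCPU 192, MEMORY_MB 196078, DISK_GB 400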
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.483143] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951063, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.499132] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "refresh_cache-c12774e4-77d1-4001-8d5d-0240dfed4ead" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1826.499333] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired lock "refresh_cache-c12774e4-77d1-4001-8d5d-0240dfed4ead" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1826.499528] env[63024]: DEBUG nova.network.neutron [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1826.827131] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.624s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1826.827788] env[63024]: DEBUG nova.compute.manager [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1826.830544] env[63024]: DEBUG oslo_concurrency.lockutils [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.140s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1826.830711] env[63024]: DEBUG nova.objects.instance [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lazy-loading 'resources' on Instance uuid 2dd20650-9273-432a-be28-73ccb66c721d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1826.840356] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1826.952675] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951062, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1826.984744] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951063, 'name': Rename_Task, 'duration_secs': 0.796427} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1826.985061] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1826.985315] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46e7f2d2-e12b-43a0-9184-5d66d35c678a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1826.991264] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1826.991264] env[63024]: value = "task-1951064" [ 1826.991264] env[63024]: _type = "Task" [ 1826.991264] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.001060] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951064, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.053128] env[63024]: DEBUG nova.network.neutron [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1827.267324] env[63024]: DEBUG nova.network.neutron [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Updating instance_info_cache with network_info: [{"id": "c38c5b06-5451-45cd-9156-009ba98e5596", "address": "fa:16:3e:ec:4e:74", "network": {"id": "3ece47b4-8cf7-4658-bdd0-f49089890e34", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-339226542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a158ec715843423e8f013939e0071c71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38c5b06-54", "ovs_interfaceid": "c38c5b06-5451-45cd-9156-009ba98e5596", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1827.338254] env[63024]: DEBUG nova.compute.utils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1827.344961] env[63024]: DEBUG nova.compute.manager [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Allocating IP information in the background. 
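The instance_info_cache payload above carries everything the VIF plug needs: port id, MAC address, fixed IP, devname and the OVS interface id. A standalone sketch of pulling those fields out of one such entry, with the values from the log hard-coded as sample data:

vif = {
    "id": "c38c5b06-5451-45cd-9156-009ba98e5596",
    "address": "fa:16:3e:ec:4e:74",
    "network": {
        "subnets": [
            {"cidr": "192.168.128.0/28",
             "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4}]}
        ]
    },
    "devname": "tapc38c5b06-54",
    "ovs_interfaceid": "c38c5b06-5451-45cd-9156-009ba98e5596",
}

def fixed_ips(vif):
    """Collect the fixed IPv4/IPv6 addresses from a network_info VIF entry."""
    return [ip["address"]
            for subnet in vif["network"]["subnets"]
            for ip in subnet["ips"]
            if ip["type"] == "fixed"]

if __name__ == "__main__":
    # fa:16:3e:ec:4e:74 tapc38c5b06-54 ['192.168.128.6']
    print(vif["address"], vif["devname"], fixed_ips(vif))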
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1827.344961] env[63024]: DEBUG nova.network.neutron [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1827.401558] env[63024]: DEBUG nova.policy [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54b7a5c8406e44e3a00cf903bc74e48d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99c4328f2c8c4139b4eace4b465e37e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1827.452117] env[63024]: DEBUG oslo_vmware.api [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951062, 'name': PowerOnVM_Task, 'duration_secs': 1.016853} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1827.454619] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1827.502312] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951064, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.772758] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Releasing lock "refresh_cache-c12774e4-77d1-4001-8d5d-0240dfed4ead" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1827.772758] env[63024]: DEBUG nova.compute.manager [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Instance network_info: |[{"id": "c38c5b06-5451-45cd-9156-009ba98e5596", "address": "fa:16:3e:ec:4e:74", "network": {"id": "3ece47b4-8cf7-4658-bdd0-f49089890e34", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-339226542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a158ec715843423e8f013939e0071c71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38c5b06-54", "ovs_interfaceid": "c38c5b06-5451-45cd-9156-009ba98e5596", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1827.772758] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:4e:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ea0fc1b-0424-46ec-bef5-6b57b7d184d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c38c5b06-5451-45cd-9156-009ba98e5596', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1827.779244] env[63024]: DEBUG oslo.service.loopingcall [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1827.781764] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1827.782536] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a60052b3-3260-48e3-853f-dd7794f0efad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.805539] env[63024]: DEBUG nova.network.neutron [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Successfully created port: 3e52f5fe-2e1c-4296-977e-cdbb6a36291e {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1827.813468] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1827.813468] env[63024]: value = "task-1951065" [ 1827.813468] env[63024]: _type = "Task" [ 1827.813468] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1827.821996] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951065, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1827.844292] env[63024]: DEBUG nova.compute.manager [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1827.849747] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3026585-85bf-413a-a5d9-12c8a7611f72 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.858074] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7238ac8e-aca0-4ed0-a509-a34dd8c7a265 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.892143] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b59ede12-e4f1-4aac-8a14-c8d4b9f0910d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.898819] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e5102c-f1a4-4385-9bbc-6b1f023110ae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1827.915747] env[63024]: DEBUG nova.compute.provider_tree [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1828.002047] env[63024]: DEBUG oslo_vmware.api [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951064, 'name': PowerOnVM_Task, 'duration_secs': 0.747143} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.003014] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1828.003244] env[63024]: INFO nova.compute.manager [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Took 8.75 seconds to spawn the instance on the hypervisor. 
[ 1828.003424] env[63024]: DEBUG nova.compute.manager [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1828.004194] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcec31b7-a54d-48b5-bb8a-b74007ac199f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.105866] env[63024]: DEBUG nova.compute.manager [req-a490d71c-0747-4cd5-ac47-4a39d9ad469f req-9a03b7a1-0db9-4634-8ab6-7c775be0ad4e service nova] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Received event network-changed-c38c5b06-5451-45cd-9156-009ba98e5596 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1828.106035] env[63024]: DEBUG nova.compute.manager [req-a490d71c-0747-4cd5-ac47-4a39d9ad469f req-9a03b7a1-0db9-4634-8ab6-7c775be0ad4e service nova] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Refreshing instance network info cache due to event network-changed-c38c5b06-5451-45cd-9156-009ba98e5596. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1828.106243] env[63024]: DEBUG oslo_concurrency.lockutils [req-a490d71c-0747-4cd5-ac47-4a39d9ad469f req-9a03b7a1-0db9-4634-8ab6-7c775be0ad4e service nova] Acquiring lock "refresh_cache-c12774e4-77d1-4001-8d5d-0240dfed4ead" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.106383] env[63024]: DEBUG oslo_concurrency.lockutils [req-a490d71c-0747-4cd5-ac47-4a39d9ad469f req-9a03b7a1-0db9-4634-8ab6-7c775be0ad4e service nova] Acquired lock "refresh_cache-c12774e4-77d1-4001-8d5d-0240dfed4ead" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.106542] env[63024]: DEBUG nova.network.neutron [req-a490d71c-0747-4cd5-ac47-4a39d9ad469f req-9a03b7a1-0db9-4634-8ab6-7c775be0ad4e service nova] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Refreshing network info cache for port c38c5b06-5451-45cd-9156-009ba98e5596 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1828.323514] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951065, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.421442] env[63024]: DEBUG nova.scheduler.client.report [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1828.465546] env[63024]: INFO nova.compute.manager [None req-85ee98db-4ba3-4928-9a49-94506279f74a tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance to original state: 'active' [ 1828.524284] env[63024]: INFO nova.compute.manager [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Took 42.93 seconds to build instance. [ 1828.812806] env[63024]: DEBUG nova.network.neutron [req-a490d71c-0747-4cd5-ac47-4a39d9ad469f req-9a03b7a1-0db9-4634-8ab6-7c775be0ad4e service nova] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Updated VIF entry in instance network info cache for port c38c5b06-5451-45cd-9156-009ba98e5596. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1828.813112] env[63024]: DEBUG nova.network.neutron [req-a490d71c-0747-4cd5-ac47-4a39d9ad469f req-9a03b7a1-0db9-4634-8ab6-7c775be0ad4e service nova] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Updating instance_info_cache with network_info: [{"id": "c38c5b06-5451-45cd-9156-009ba98e5596", "address": "fa:16:3e:ec:4e:74", "network": {"id": "3ece47b4-8cf7-4658-bdd0-f49089890e34", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-339226542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a158ec715843423e8f013939e0071c71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc38c5b06-54", "ovs_interfaceid": "c38c5b06-5451-45cd-9156-009ba98e5596", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1828.823826] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951065, 'name': CreateVM_Task, 'duration_secs': 0.726658} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1828.824567] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1828.825229] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1828.825389] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1828.825701] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1828.826188] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dd37e7d0-cc4e-4c64-acea-df5311c4709b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.830757] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1828.830757] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f3c955-8c77-fb1d-77e6-d4025dd2d475" [ 1828.830757] env[63024]: _type = "Task" [ 1828.830757] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1828.839215] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f3c955-8c77-fb1d-77e6-d4025dd2d475, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1828.861134] env[63024]: DEBUG nova.compute.manager [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1828.886427] env[63024]: DEBUG nova.virt.hardware [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='d2da8c2fa18b8637f7dc6d99018f85e6',container_format='bare',created_at=2024-12-22T11:09:43Z,direct_url=,disk_format='vmdk',id=ee5d780e-10a0-4109-93a4-c4258b879f3d,min_disk=1,min_ram=0,name='tempest-test-snap-1774561000',owner='99c4328f2c8c4139b4eace4b465e37e3',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-12-22T11:10:01Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1828.886679] env[63024]: DEBUG nova.virt.hardware [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1828.886834] env[63024]: DEBUG nova.virt.hardware [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1828.887053] env[63024]: DEBUG nova.virt.hardware [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1828.887225] env[63024]: DEBUG nova.virt.hardware [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1828.887374] env[63024]: DEBUG nova.virt.hardware [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1828.887576] env[63024]: DEBUG nova.virt.hardware [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1828.887729] env[63024]: DEBUG nova.virt.hardware [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1828.888088] env[63024]: DEBUG nova.virt.hardware [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Got 1 
possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1828.888289] env[63024]: DEBUG nova.virt.hardware [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1828.888467] env[63024]: DEBUG nova.virt.hardware [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1828.889369] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74bc4532-ca9e-4b49-a8b8-02f52dd75a75 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.899285] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed432a3-ae1c-4b8f-a82b-b55f1eedbfdb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1828.927594] env[63024]: DEBUG oslo_concurrency.lockutils [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.097s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1828.930041] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.042s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1828.931397] env[63024]: INFO nova.compute.claims [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1828.951541] env[63024]: INFO nova.scheduler.client.report [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted allocations for instance 2dd20650-9273-432a-be28-73ccb66c721d [ 1829.025991] env[63024]: DEBUG oslo_concurrency.lockutils [None req-34696ed8-cb2b-45b6-996b-400a697d1ba2 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.482s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.317449] env[63024]: DEBUG oslo_concurrency.lockutils [req-a490d71c-0747-4cd5-ac47-4a39d9ad469f req-9a03b7a1-0db9-4634-8ab6-7c775be0ad4e service nova] Releasing lock "refresh_cache-c12774e4-77d1-4001-8d5d-0240dfed4ead" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1829.353728] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f3c955-8c77-fb1d-77e6-d4025dd2d475, 'name': SearchDatastore_Task, 'duration_secs': 0.028663} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.354095] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1829.354307] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1829.354542] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1829.354683] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1829.354857] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1829.355159] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88f6bb6f-4126-4d3a-90ec-36c225ef5a87 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.367018] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1829.367018] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1829.367018] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f8ea1bc3-19d6-4eb3-a0f4-d363e7f2ffd3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.370478] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1829.370478] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b290a1-a0d7-19c7-bc45-96453ecb0d7e" [ 1829.370478] env[63024]: _type = "Task" [ 1829.370478] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.378146] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b290a1-a0d7-19c7-bc45-96453ecb0d7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1829.459896] env[63024]: DEBUG oslo_concurrency.lockutils [None req-55114a38-c7d5-4672-b611-8820a7d7a086 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "2dd20650-9273-432a-be28-73ccb66c721d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.802s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1829.529820] env[63024]: DEBUG nova.compute.manager [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1829.732195] env[63024]: DEBUG nova.network.neutron [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Successfully updated port: 3e52f5fe-2e1c-4296-977e-cdbb6a36291e {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1829.762363] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1829.762994] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1829.881314] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b290a1-a0d7-19c7-bc45-96453ecb0d7e, 'name': SearchDatastore_Task, 'duration_secs': 0.009154} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1829.881856] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e92adde1-1aba-4343-ada8-70e9470349cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1829.889082] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1829.889082] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52438a04-19d9-0243-b357-10e4c4d026e3" [ 1829.889082] env[63024]: _type = "Task" [ 1829.889082] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1829.896381] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52438a04-19d9-0243-b357-10e4c4d026e3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.054241] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.132515] env[63024]: DEBUG nova.compute.manager [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Received event network-vif-plugged-3e52f5fe-2e1c-4296-977e-cdbb6a36291e {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1830.132515] env[63024]: DEBUG oslo_concurrency.lockutils [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] Acquiring lock "9bf1316e-f1ae-426e-a0a2-d814a2460c4d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.132515] env[63024]: DEBUG oslo_concurrency.lockutils [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] Lock "9bf1316e-f1ae-426e-a0a2-d814a2460c4d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.132614] env[63024]: DEBUG oslo_concurrency.lockutils [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] Lock "9bf1316e-f1ae-426e-a0a2-d814a2460c4d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.132752] env[63024]: DEBUG nova.compute.manager [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] No waiting events found dispatching network-vif-plugged-3e52f5fe-2e1c-4296-977e-cdbb6a36291e {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1830.133044] env[63024]: WARNING nova.compute.manager [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Received unexpected event network-vif-plugged-3e52f5fe-2e1c-4296-977e-cdbb6a36291e for instance with vm_state building and task_state spawning. [ 1830.134032] env[63024]: DEBUG nova.compute.manager [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Received event network-changed-3e52f5fe-2e1c-4296-977e-cdbb6a36291e {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1830.134032] env[63024]: DEBUG nova.compute.manager [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Refreshing instance network info cache due to event network-changed-3e52f5fe-2e1c-4296-977e-cdbb6a36291e. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1830.134032] env[63024]: DEBUG oslo_concurrency.lockutils [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] Acquiring lock "refresh_cache-9bf1316e-f1ae-426e-a0a2-d814a2460c4d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1830.134032] env[63024]: DEBUG oslo_concurrency.lockutils [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] Acquired lock "refresh_cache-9bf1316e-f1ae-426e-a0a2-d814a2460c4d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1830.134032] env[63024]: DEBUG nova.network.neutron [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Refreshing network info cache for port 3e52f5fe-2e1c-4296-977e-cdbb6a36291e {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1830.235716] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "refresh_cache-9bf1316e-f1ae-426e-a0a2-d814a2460c4d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1830.277688] env[63024]: DEBUG oslo_concurrency.lockutils [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.278015] env[63024]: DEBUG oslo_concurrency.lockutils [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.278289] env[63024]: DEBUG oslo_concurrency.lockutils [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1830.278532] env[63024]: DEBUG oslo_concurrency.lockutils [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1830.278795] env[63024]: DEBUG oslo_concurrency.lockutils [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1830.281923] env[63024]: INFO nova.compute.manager [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Terminating instance [ 1830.400540] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52438a04-19d9-0243-b357-10e4c4d026e3, 'name': SearchDatastore_Task, 'duration_secs': 0.010331} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1830.403412] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1830.403736] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c12774e4-77d1-4001-8d5d-0240dfed4ead/c12774e4-77d1-4001-8d5d-0240dfed4ead.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1830.404247] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c343cb18-bbc8-46fc-8b10-b9f9fed20df0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.412807] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1830.412807] env[63024]: value = "task-1951066" [ 1830.412807] env[63024]: _type = "Task" [ 1830.412807] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.424619] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951066, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.447643] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-911979db-23e3-4281-b09a-29992290e100 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.455640] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b2ec5e-3cc6-447b-90bd-fa7942d823ad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.488882] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e72d61a2-0f47-4175-be4f-84ec9ae3ef1c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.496715] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b25d299-991d-489f-b55f-b5629702b1a8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.512376] env[63024]: DEBUG nova.compute.provider_tree [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1830.666136] env[63024]: DEBUG nova.network.neutron [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1830.762318] env[63024]: DEBUG nova.network.neutron [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1830.791630] env[63024]: DEBUG nova.compute.manager [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1830.791899] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1830.793223] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c412d1ce-ea65-4950-b277-6cd8b3bf211f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.804304] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1830.804694] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-239f3601-d728-49fb-91ce-863883e65ef3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1830.815260] env[63024]: DEBUG oslo_vmware.api [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1830.815260] env[63024]: value = "task-1951067" [ 1830.815260] env[63024]: _type = "Task" [ 1830.815260] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1830.825470] env[63024]: DEBUG oslo_vmware.api [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951067, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1830.922621] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951066, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.015999] env[63024]: DEBUG nova.scheduler.client.report [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1831.264865] env[63024]: DEBUG oslo_concurrency.lockutils [req-295ea656-408e-4ae2-bf9b-6f4dd6a2c734 req-431ac337-be52-49bd-bd07-9b2c06593f76 service nova] Releasing lock "refresh_cache-9bf1316e-f1ae-426e-a0a2-d814a2460c4d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1831.265339] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "refresh_cache-9bf1316e-f1ae-426e-a0a2-d814a2460c4d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1831.265522] env[63024]: DEBUG nova.network.neutron [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1831.325495] env[63024]: DEBUG oslo_vmware.api [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951067, 'name': PowerOffVM_Task, 'duration_secs': 0.366742} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.325759] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1831.325960] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1831.326255] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0db4afb-adbd-4311-b768-5eb6c94150c5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.404241] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1831.404693] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1831.405062] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Deleting the datastore file [datastore1] 650a97b9-911e-44b0-9e82-a6d4cc95c9dd {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1831.405911] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cb3aedc5-cc32-4080-99d6-2aab9d7d5b7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.413807] env[63024]: DEBUG oslo_vmware.api [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1831.413807] env[63024]: value = "task-1951069" [ 1831.413807] env[63024]: _type = "Task" [ 1831.413807] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.428016] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951066, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643999} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.432386] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c12774e4-77d1-4001-8d5d-0240dfed4ead/c12774e4-77d1-4001-8d5d-0240dfed4ead.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1831.432705] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1831.432991] env[63024]: DEBUG oslo_vmware.api [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951069, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.433294] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1cff695d-480b-441d-8442-6a5ecef2caa7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.440279] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1831.440279] env[63024]: value = "task-1951070" [ 1831.440279] env[63024]: _type = "Task" [ 1831.440279] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.449226] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951070, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.521447] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.591s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.522052] env[63024]: DEBUG nova.compute.manager [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1831.529722] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 22.133s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.529955] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1831.530134] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1831.530466] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.415s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1831.530696] env[63024]: DEBUG nova.objects.instance [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Lazy-loading 'resources' on Instance uuid 02db92ec-3377-406b-a95c-0022579fa75b {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1831.533203] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26574288-d508-4aa6-bcfd-15350b1e1e54 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.543114] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbf7baa-b11f-4b4c-9e4c-679c0be2ceda {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.563022] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e89e6c31-b6be-404b-8b4d-20e870bbd901 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.571841] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a320a00-11d7-416a-a704-f34efb997405 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.608261] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179624MB free_disk=169GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1831.608474] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1831.824016] env[63024]: DEBUG nova.network.neutron [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1831.933062] env[63024]: DEBUG oslo_vmware.api [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951069, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1831.948959] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951070, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.357019} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1831.949260] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1831.950074] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78047d12-e9e4-41e8-8f12-83dab865fdae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.971763] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] c12774e4-77d1-4001-8d5d-0240dfed4ead/c12774e4-77d1-4001-8d5d-0240dfed4ead.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1831.972085] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d94565b-cd76-4dbe-85cb-e67dcc400714 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.991126] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1831.991126] env[63024]: value = "task-1951071" [ 1831.991126] env[63024]: _type = "Task" [ 1831.991126] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1831.999156] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951071, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.031267] env[63024]: DEBUG nova.compute.utils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1832.032988] env[63024]: DEBUG nova.compute.manager [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1832.033186] env[63024]: DEBUG nova.network.neutron [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1832.063579] env[63024]: DEBUG nova.network.neutron [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Updating instance_info_cache with network_info: [{"id": "3e52f5fe-2e1c-4296-977e-cdbb6a36291e", "address": "fa:16:3e:fc:37:39", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e52f5fe-2e", "ovs_interfaceid": "3e52f5fe-2e1c-4296-977e-cdbb6a36291e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1832.073874] env[63024]: DEBUG nova.policy [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27151e89c8ee4ddd9285bff3795a82b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e53c02ad56640dc8cbc8839669b67bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1832.358242] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 
tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1832.358242] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1832.368179] env[63024]: DEBUG nova.network.neutron [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Successfully created port: a86b5113-d05e-45ac-bf54-833ea769eae5 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1832.433049] env[63024]: DEBUG oslo_vmware.api [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.767413} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1832.433324] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1832.433504] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1832.433679] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1832.433848] env[63024]: INFO nova.compute.manager [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Took 1.64 seconds to destroy the instance on the hypervisor. [ 1832.434112] env[63024]: DEBUG oslo.service.loopingcall [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1832.434316] env[63024]: DEBUG nova.compute.manager [-] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1832.434410] env[63024]: DEBUG nova.network.neutron [-] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1832.503153] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951071, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.536866] env[63024]: DEBUG nova.compute.manager [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1832.568481] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "refresh_cache-9bf1316e-f1ae-426e-a0a2-d814a2460c4d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1832.568810] env[63024]: DEBUG nova.compute.manager [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Instance network_info: |[{"id": "3e52f5fe-2e1c-4296-977e-cdbb6a36291e", "address": "fa:16:3e:fc:37:39", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3e52f5fe-2e", "ovs_interfaceid": "3e52f5fe-2e1c-4296-977e-cdbb6a36291e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1832.569259] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:37:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3e52f5fe-2e1c-4296-977e-cdbb6a36291e', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1832.585345] env[63024]: DEBUG oslo.service.loopingcall [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1832.585345] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1832.585345] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9ba24076-b02e-455b-a1e1-c34c3e12575a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.598310] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a73222e-7842-48c9-823d-f4b90e183cba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.607547] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b35f6d-4166-4353-b833-d05ce276c342 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.610820] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1832.610820] env[63024]: value = "task-1951072" [ 1832.610820] env[63024]: _type = "Task" [ 1832.610820] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1832.648167] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ab64b5-35fc-43c7-8d6f-22afc2611ff5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.654624] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951072, 'name': CreateVM_Task} progress is 15%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1832.661712] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5db7bb49-cab8-4027-a442-491352cb13cf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1832.677599] env[63024]: DEBUG nova.compute.provider_tree [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1832.787756] env[63024]: DEBUG nova.compute.manager [req-169399c9-2f42-48d9-b587-631d60225759 req-5d7d789c-2792-48a6-bf0f-9488f16c2a24 service nova] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Received event network-vif-deleted-241606ef-afe1-4ca8-912c-dae7639e4941 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1832.787975] env[63024]: INFO nova.compute.manager [req-169399c9-2f42-48d9-b587-631d60225759 req-5d7d789c-2792-48a6-bf0f-9488f16c2a24 service nova] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Neutron deleted interface 241606ef-afe1-4ca8-912c-dae7639e4941; detaching it from the instance and deleting it from the info cache [ 1832.788165] env[63024]: DEBUG nova.network.neutron [req-169399c9-2f42-48d9-b587-631d60225759 req-5d7d789c-2792-48a6-bf0f-9488f16c2a24 service nova] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.005314] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951071, 'name': ReconfigVM_Task, 'duration_secs': 0.709045} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.005694] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Reconfigured VM instance instance-0000003f to attach disk [datastore1] c12774e4-77d1-4001-8d5d-0240dfed4ead/c12774e4-77d1-4001-8d5d-0240dfed4ead.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1833.006322] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f9e8d3c0-cc27-434a-bf7c-9c38b8a967c9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.012502] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1833.012502] env[63024]: value = "task-1951073" [ 1833.012502] env[63024]: _type = "Task" [ 1833.012502] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.020472] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951073, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.120960] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951072, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.180809] env[63024]: DEBUG nova.scheduler.client.report [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1833.257859] env[63024]: DEBUG nova.network.neutron [-] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1833.291185] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dcf0f9e6-0553-46a3-aa42-e405512a9a62 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.301485] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64d011d9-5a1d-429e-9ea2-b78de6969624 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.336829] env[63024]: DEBUG nova.compute.manager [req-169399c9-2f42-48d9-b587-631d60225759 req-5d7d789c-2792-48a6-bf0f-9488f16c2a24 service nova] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Detach interface failed, port_id=241606ef-afe1-4ca8-912c-dae7639e4941, reason: Instance 650a97b9-911e-44b0-9e82-a6d4cc95c9dd could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1833.523117] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951073, 'name': Rename_Task, 'duration_secs': 0.202412} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.523463] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1833.523749] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c3fa69c-68ee-4c4b-8a47-ab514ede22b4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.529944] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1833.529944] env[63024]: value = "task-1951074" [ 1833.529944] env[63024]: _type = "Task" [ 1833.529944] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.539368] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951074, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.546791] env[63024]: DEBUG nova.compute.manager [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1833.572064] env[63024]: DEBUG nova.virt.hardware [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1833.572328] env[63024]: DEBUG nova.virt.hardware [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1833.572491] env[63024]: DEBUG nova.virt.hardware [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1833.572674] env[63024]: DEBUG nova.virt.hardware [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1833.572816] env[63024]: DEBUG nova.virt.hardware [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1833.572959] env[63024]: DEBUG nova.virt.hardware [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1833.573185] env[63024]: DEBUG nova.virt.hardware [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1833.573437] env[63024]: DEBUG nova.virt.hardware [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1833.573511] env[63024]: DEBUG 
nova.virt.hardware [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1833.573672] env[63024]: DEBUG nova.virt.hardware [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1833.573861] env[63024]: DEBUG nova.virt.hardware [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1833.574733] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-581e0cc3-7093-4f34-a265-d865d95c07e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.582684] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d26c0a2-74e2-47e5-899c-01f186217cb8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.620677] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951072, 'name': CreateVM_Task, 'duration_secs': 0.670036} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1833.620847] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1833.621589] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1833.622175] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1833.622677] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1833.622948] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01a8247b-4418-41d5-9318-d8a72645df55 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1833.627404] env[63024]: DEBUG oslo_vmware.api [None 
req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1833.627404] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ec1ad7-ca0b-e78a-0707-ef01c724a137" [ 1833.627404] env[63024]: _type = "Task" [ 1833.627404] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1833.634798] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ec1ad7-ca0b-e78a-0707-ef01c724a137, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1833.686034] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.155s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.688264] env[63024]: DEBUG oslo_concurrency.lockutils [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.964s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.688501] env[63024]: DEBUG nova.objects.instance [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lazy-loading 'resources' on Instance uuid 6156ce17-3f29-487a-afc5-2fa0fb7f114c {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1833.707247] env[63024]: INFO nova.scheduler.client.report [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Deleted allocations for instance 02db92ec-3377-406b-a95c-0022579fa75b [ 1833.760904] env[63024]: INFO nova.compute.manager [-] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Took 1.33 seconds to deallocate network for instance. 
[ 1833.878021] env[63024]: DEBUG nova.compute.manager [req-38d3ba63-bef1-44d6-9974-fbf051f2f248 req-9f736ce0-8dd8-4ac4-9443-8ceb82a9c331 service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Received event network-vif-plugged-a86b5113-d05e-45ac-bf54-833ea769eae5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1833.878021] env[63024]: DEBUG oslo_concurrency.lockutils [req-38d3ba63-bef1-44d6-9974-fbf051f2f248 req-9f736ce0-8dd8-4ac4-9443-8ceb82a9c331 service nova] Acquiring lock "fe6847e2-a742-4338-983f-698c13aaefde-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1833.878021] env[63024]: DEBUG oslo_concurrency.lockutils [req-38d3ba63-bef1-44d6-9974-fbf051f2f248 req-9f736ce0-8dd8-4ac4-9443-8ceb82a9c331 service nova] Lock "fe6847e2-a742-4338-983f-698c13aaefde-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1833.878021] env[63024]: DEBUG oslo_concurrency.lockutils [req-38d3ba63-bef1-44d6-9974-fbf051f2f248 req-9f736ce0-8dd8-4ac4-9443-8ceb82a9c331 service nova] Lock "fe6847e2-a742-4338-983f-698c13aaefde-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1833.878021] env[63024]: DEBUG nova.compute.manager [req-38d3ba63-bef1-44d6-9974-fbf051f2f248 req-9f736ce0-8dd8-4ac4-9443-8ceb82a9c331 service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] No waiting events found dispatching network-vif-plugged-a86b5113-d05e-45ac-bf54-833ea769eae5 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1833.878021] env[63024]: WARNING nova.compute.manager [req-38d3ba63-bef1-44d6-9974-fbf051f2f248 req-9f736ce0-8dd8-4ac4-9443-8ceb82a9c331 service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Received unexpected event network-vif-plugged-a86b5113-d05e-45ac-bf54-833ea769eae5 for instance with vm_state building and task_state spawning. [ 1833.970966] env[63024]: DEBUG nova.network.neutron [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Successfully updated port: a86b5113-d05e-45ac-bf54-833ea769eae5 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1834.039752] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951074, 'name': PowerOnVM_Task} progress is 78%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.138758] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1834.139068] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Processing image ee5d780e-10a0-4109-93a4-c4258b879f3d {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1834.139324] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d/ee5d780e-10a0-4109-93a4-c4258b879f3d.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.139473] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d/ee5d780e-10a0-4109-93a4-c4258b879f3d.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.139650] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1834.139903] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-66ffc13f-5edb-4ffe-81d2-4d64268c1040 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.156065] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1834.156317] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1834.157199] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0a12939-d76c-4b7d-a911-2849980a6048 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.162411] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1834.162411] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529594b5-988c-1288-4cb3-89147e47f09a" [ 1834.162411] env[63024]: _type = "Task" [ 1834.162411] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1834.173019] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529594b5-988c-1288-4cb3-89147e47f09a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1834.216174] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2d75601a-2fc2-445b-a169-a37988d8d4ce tempest-ServerMetadataNegativeTestJSON-895851560 tempest-ServerMetadataNegativeTestJSON-895851560-project-member] Lock "02db92ec-3377-406b-a95c-0022579fa75b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.659s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1834.267448] env[63024]: DEBUG oslo_concurrency.lockutils [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1834.477625] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1834.477625] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1834.477625] env[63024]: DEBUG nova.network.neutron [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1834.542954] env[63024]: DEBUG oslo_vmware.api [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': 
task-1951074, 'name': PowerOnVM_Task, 'duration_secs': 0.77388} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1834.544055] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1834.544055] env[63024]: INFO nova.compute.manager [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Took 9.31 seconds to spawn the instance on the hypervisor. [ 1834.544055] env[63024]: DEBUG nova.compute.manager [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1834.544771] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5fa7e5-9a6c-4f19-94c3-7bc905c43b40 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.674693] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Preparing fetch location {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1834.675070] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Fetch image to [datastore1] OSTACK_IMG_e82cee42-ae34-4594-b151-ce7311f36e13/OSTACK_IMG_e82cee42-ae34-4594-b151-ce7311f36e13.vmdk {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1834.675304] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Downloading stream optimized image ee5d780e-10a0-4109-93a4-c4258b879f3d to [datastore1] OSTACK_IMG_e82cee42-ae34-4594-b151-ce7311f36e13/OSTACK_IMG_e82cee42-ae34-4594-b151-ce7311f36e13.vmdk on the data store datastore1 as vApp {{(pid=63024) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1834.675517] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Downloading image file data ee5d780e-10a0-4109-93a4-c4258b879f3d to the ESX as VM named 'OSTACK_IMG_e82cee42-ae34-4594-b151-ce7311f36e13' {{(pid=63024) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1834.738412] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54c19c14-a7b6-48df-9373-74a74bdc21f8 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.748168] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5ae7a4-8eb2-4f13-b0fb-d0b05ea4328c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.784808] env[63024]: DEBUG oslo_vmware.rw_handles [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1834.784808] env[63024]: value = "resgroup-9" [ 1834.784808] env[63024]: _type = "ResourcePool" [ 1834.784808] env[63024]: }. {{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1834.785611] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c225fc24-8df3-4025-aeec-7d14a155a23b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.788172] env[63024]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-30689ada-41aa-4b17-9467-238d55abb910 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.808934] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877a5931-42c4-4e00-acde-0c24f33da73d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1834.814065] env[63024]: DEBUG oslo_vmware.rw_handles [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lease: (returnval){ [ 1834.814065] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521bf8ba-d2cf-3a03-65ee-7678e4883efa" [ 1834.814065] env[63024]: _type = "HttpNfcLease" [ 1834.814065] env[63024]: } obtained for vApp import into resource pool (val){ [ 1834.814065] env[63024]: value = "resgroup-9" [ 1834.814065] env[63024]: _type = "ResourcePool" [ 1834.814065] env[63024]: }. {{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1834.814340] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the lease: (returnval){ [ 1834.814340] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521bf8ba-d2cf-3a03-65ee-7678e4883efa" [ 1834.814340] env[63024]: _type = "HttpNfcLease" [ 1834.814340] env[63024]: } to be ready. 
{{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1834.825302] env[63024]: DEBUG nova.compute.provider_tree [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1834.832698] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1834.832698] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521bf8ba-d2cf-3a03-65ee-7678e4883efa" [ 1834.832698] env[63024]: _type = "HttpNfcLease" [ 1834.832698] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1835.023684] env[63024]: DEBUG nova.network.neutron [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1835.065575] env[63024]: INFO nova.compute.manager [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Took 44.09 seconds to build instance. [ 1835.243387] env[63024]: DEBUG nova.network.neutron [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance_info_cache with network_info: [{"id": "a86b5113-d05e-45ac-bf54-833ea769eae5", "address": "fa:16:3e:7e:0d:a2", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa86b5113-d0", "ovs_interfaceid": "a86b5113-d05e-45ac-bf54-833ea769eae5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1835.324789] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1835.324789] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521bf8ba-d2cf-3a03-65ee-7678e4883efa" [ 1835.324789] env[63024]: _type = "HttpNfcLease" [ 1835.324789] env[63024]: } is ready. 
{{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1835.325114] env[63024]: DEBUG oslo_vmware.rw_handles [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1835.325114] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521bf8ba-d2cf-3a03-65ee-7678e4883efa" [ 1835.325114] env[63024]: _type = "HttpNfcLease" [ 1835.325114] env[63024]: }. {{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1835.325832] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c24950-a75c-4859-9a71-cc35dd1bd5e8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.328752] env[63024]: DEBUG nova.scheduler.client.report [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1835.339558] env[63024]: DEBUG oslo_vmware.rw_handles [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52580dd9-bb5a-fbcc-e286-c6815f165a0a/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1835.339639] env[63024]: DEBUG oslo_vmware.rw_handles [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52580dd9-bb5a-fbcc-e286-c6815f165a0a/disk-0.vmdk. 
{{(pid=63024) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1835.411207] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5357c13d-9976-4a39-b80d-f1ccac8fe9be {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.568720] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c47cc5a8-b1a7-47b6-93fc-add368334401 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "c12774e4-77d1-4001-8d5d-0240dfed4ead" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.698s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.747936] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1835.748665] env[63024]: DEBUG nova.compute.manager [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Instance network_info: |[{"id": "a86b5113-d05e-45ac-bf54-833ea769eae5", "address": "fa:16:3e:7e:0d:a2", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa86b5113-d0", "ovs_interfaceid": "a86b5113-d05e-45ac-bf54-833ea769eae5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1835.748665] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7e:0d:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '09bf081b-cdf0-4977-abe2-2339a87409ab', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a86b5113-d05e-45ac-bf54-833ea769eae5', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1835.756557] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating folder: Project (0e53c02ad56640dc8cbc8839669b67bf). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1835.759429] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bbf41a91-96d9-42d3-86ef-cb84397d435b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.770680] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Created folder: Project (0e53c02ad56640dc8cbc8839669b67bf) in parent group-v401959. [ 1835.770885] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating folder: Instances. Parent ref: group-v402139. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1835.772625] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c226a69f-fbfd-430c-9131-9002cd77d076 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.783272] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Created folder: Instances in parent group-v402139. [ 1835.783554] env[63024]: DEBUG oslo.service.loopingcall [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1835.785679] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1835.785679] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11b754ad-705e-48b8-be7c-df45950707ef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1835.808379] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1835.808379] env[63024]: value = "task-1951078" [ 1835.808379] env[63024]: _type = "Task" [ 1835.808379] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1835.817642] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951078, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1835.834238] env[63024]: DEBUG oslo_concurrency.lockutils [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.146s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1835.836530] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.347s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1835.838166] env[63024]: INFO nova.compute.claims [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1835.868650] env[63024]: INFO nova.scheduler.client.report [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted allocations for instance 6156ce17-3f29-487a-afc5-2fa0fb7f114c [ 1835.971189] env[63024]: DEBUG nova.compute.manager [req-791a07d7-381b-4d86-a95b-614fec0da8ba req-b8dc86f2-3710-4bd3-ad16-ca6e14a023ad service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Received event network-changed-a86b5113-d05e-45ac-bf54-833ea769eae5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1835.971189] env[63024]: DEBUG nova.compute.manager [req-791a07d7-381b-4d86-a95b-614fec0da8ba req-b8dc86f2-3710-4bd3-ad16-ca6e14a023ad service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Refreshing instance network info cache due to event network-changed-a86b5113-d05e-45ac-bf54-833ea769eae5. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1835.971189] env[63024]: DEBUG oslo_concurrency.lockutils [req-791a07d7-381b-4d86-a95b-614fec0da8ba req-b8dc86f2-3710-4bd3-ad16-ca6e14a023ad service nova] Acquiring lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1835.971189] env[63024]: DEBUG oslo_concurrency.lockutils [req-791a07d7-381b-4d86-a95b-614fec0da8ba req-b8dc86f2-3710-4bd3-ad16-ca6e14a023ad service nova] Acquired lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1835.971189] env[63024]: DEBUG nova.network.neutron [req-791a07d7-381b-4d86-a95b-614fec0da8ba req-b8dc86f2-3710-4bd3-ad16-ca6e14a023ad service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Refreshing network info cache for port a86b5113-d05e-45ac-bf54-833ea769eae5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1836.072990] env[63024]: DEBUG nova.compute.manager [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1836.144290] env[63024]: DEBUG oslo_vmware.rw_handles [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Completed reading data from the image iterator. {{(pid=63024) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1836.144525] env[63024]: DEBUG oslo_vmware.rw_handles [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52580dd9-bb5a-fbcc-e286-c6815f165a0a/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1836.145549] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45acf2df-39a5-468b-8eb5-285b590e70b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.152449] env[63024]: DEBUG oslo_vmware.rw_handles [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52580dd9-bb5a-fbcc-e286-c6815f165a0a/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1836.152639] env[63024]: DEBUG oslo_vmware.rw_handles [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52580dd9-bb5a-fbcc-e286-c6815f165a0a/disk-0.vmdk. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1836.152872] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-80e342ac-d128-4a54-b1da-ba526ec3ed35 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.316050] env[63024]: DEBUG oslo_vmware.rw_handles [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52580dd9-bb5a-fbcc-e286-c6815f165a0a/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1836.316279] env[63024]: INFO nova.virt.vmwareapi.images [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Downloaded image file data ee5d780e-10a0-4109-93a4-c4258b879f3d [ 1836.317482] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a88c71d-62d8-4c3b-9d45-a23a10dcb5ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.324140] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951078, 'name': CreateVM_Task, 'duration_secs': 0.34936} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.324640] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1836.325407] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.325595] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.325950] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1836.340205] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc266ffd-c013-456c-998c-9caf0f9c626e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.342025] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-17a219c0-b59f-47c0-b0b3-be15a4fcf344 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.346925] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1836.346925] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d436eb-af4e-a9fa-d31a-f52306133c07" [ 1836.346925] env[63024]: _type = "Task" [ 1836.346925] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.355150] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d436eb-af4e-a9fa-d31a-f52306133c07, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.372315] env[63024]: INFO nova.virt.vmwareapi.images [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] The imported VM was unregistered [ 1836.376697] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Caching image {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1836.376697] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating directory with path [datastore1] devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1836.380481] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f679a7b1-d4c6-4924-bde3-7ffdeb2aec2e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.383317] env[63024]: DEBUG oslo_concurrency.lockutils [None req-80512235-9f10-4ac5-a6da-8030b793b94d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "6156ce17-3f29-487a-afc5-2fa0fb7f114c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.227s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1836.394013] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Created directory with path [datastore1] devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1836.394289] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_e82cee42-ae34-4594-b151-ce7311f36e13/OSTACK_IMG_e82cee42-ae34-4594-b151-ce7311f36e13.vmdk to [datastore1] 
devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d/ee5d780e-10a0-4109-93a4-c4258b879f3d.vmdk. {{(pid=63024) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1836.394400] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-cf48e4e1-cf07-4cb8-b98e-673040ea7209 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.401385] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1836.401385] env[63024]: value = "task-1951080" [ 1836.401385] env[63024]: _type = "Task" [ 1836.401385] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.409757] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951080, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.603013] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1836.695030] env[63024]: DEBUG nova.network.neutron [req-791a07d7-381b-4d86-a95b-614fec0da8ba req-b8dc86f2-3710-4bd3-ad16-ca6e14a023ad service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updated VIF entry in instance network info cache for port a86b5113-d05e-45ac-bf54-833ea769eae5. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1836.695030] env[63024]: DEBUG nova.network.neutron [req-791a07d7-381b-4d86-a95b-614fec0da8ba req-b8dc86f2-3710-4bd3-ad16-ca6e14a023ad service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance_info_cache with network_info: [{"id": "a86b5113-d05e-45ac-bf54-833ea769eae5", "address": "fa:16:3e:7e:0d:a2", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa86b5113-d0", "ovs_interfaceid": "a86b5113-d05e-45ac-bf54-833ea769eae5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1836.864880] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d436eb-af4e-a9fa-d31a-f52306133c07, 'name': SearchDatastore_Task, 'duration_secs': 0.029902} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1836.865527] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1836.865527] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1836.865675] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1836.865846] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1836.866146] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1836.866328] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f99eabe0-e735-45d5-87c9-fe47517e9d7b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.875926] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1836.876053] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1836.876878] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8df0b6a3-11a2-492a-8adc-49249a1d809f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1836.882252] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1836.882252] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5281e388-cfb1-95ff-668f-9740c48d8906" [ 1836.882252] env[63024]: _type = "Task" [ 1836.882252] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1836.893627] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5281e388-cfb1-95ff-668f-9740c48d8906, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1836.911473] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951080, 'name': MoveVirtualDisk_Task} progress is 9%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.197698] env[63024]: DEBUG oslo_concurrency.lockutils [req-791a07d7-381b-4d86-a95b-614fec0da8ba req-b8dc86f2-3710-4bd3-ad16-ca6e14a023ad service nova] Releasing lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.354156] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44adeb85-e44d-48cd-a3ba-f74c732389f6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.362890] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e2a861-a92b-4069-a738-74ce29de5fbd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.415916] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86a08834-674c-46b8-b7cc-e99ebfc32331 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.427107] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5281e388-cfb1-95ff-668f-9740c48d8906, 'name': SearchDatastore_Task, 'duration_secs': 0.094983} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.434128] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951080, 'name': MoveVirtualDisk_Task} progress is 29%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.434399] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8338e2dd-18bb-4f8d-9186-ae0aa97f48a9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.437957] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115f82e9-c05a-40e6-b09e-d1081f5e646a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.454813] env[63024]: DEBUG nova.compute.provider_tree [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1837.457278] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1837.457278] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529b4c46-9ccc-ec16-00d9-9e3b979dba3c" [ 1837.457278] env[63024]: _type = "Task" [ 1837.457278] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1837.468823] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529b4c46-9ccc-ec16-00d9-9e3b979dba3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.918109] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951080, 'name': MoveVirtualDisk_Task} progress is 49%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1837.967464] env[63024]: DEBUG nova.scheduler.client.report [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1837.980728] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529b4c46-9ccc-ec16-00d9-9e3b979dba3c, 'name': SearchDatastore_Task, 'duration_secs': 0.097115} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1837.983907] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1837.983907] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] fe6847e2-a742-4338-983f-698c13aaefde/fe6847e2-a742-4338-983f-698c13aaefde.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1837.983907] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a916699-a39c-4e3a-bdc7-0ff154af9c50 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1837.994298] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1837.994298] env[63024]: value = "task-1951081" [ 1837.994298] env[63024]: _type = "Task" [ 1837.994298] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1838.007101] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951081, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.114485] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "56d220f3-b97c-4cbe-b582-c4a4f1171472" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.114692] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "56d220f3-b97c-4cbe-b582-c4a4f1171472" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.418729] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951080, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.475879] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.639s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1838.476495] env[63024]: DEBUG nova.compute.manager [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1838.479631] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.821s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.481361] env[63024]: INFO nova.compute.claims [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1838.507541] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951081, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.578280] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8bbeb374-afe8-417d-8efe-deb25619141f tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1838.579953] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8bbeb374-afe8-417d-8efe-deb25619141f tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1838.579953] env[63024]: DEBUG nova.compute.manager [None req-8bbeb374-afe8-417d-8efe-deb25619141f tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1838.580144] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be66c1c2-07d4-4223-a699-2d6482a61378 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1838.592688] env[63024]: DEBUG nova.compute.manager [None req-8bbeb374-afe8-417d-8efe-deb25619141f tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63024) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1838.592891] env[63024]: DEBUG nova.objects.instance [None req-8bbeb374-afe8-417d-8efe-deb25619141f tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lazy-loading 'flavor' on Instance uuid ac60546a-37b2-4d2a-8505-61fe202e2ed0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1838.919931] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951080, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1838.986641] env[63024]: DEBUG nova.compute.utils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1838.992118] env[63024]: DEBUG nova.compute.manager [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1838.992118] env[63024]: DEBUG nova.network.neutron [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1839.008872] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951081, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.031437] env[63024]: DEBUG nova.policy [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e830cb2332664f389bd463d2cace2352', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4c262cc280074a0bb3b8967f2cbb7c73', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1839.301163] env[63024]: DEBUG nova.network.neutron [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Successfully created port: 9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1839.423860] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951080, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.706405} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.424302] env[63024]: INFO nova.virt.vmwareapi.ds_util [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_e82cee42-ae34-4594-b151-ce7311f36e13/OSTACK_IMG_e82cee42-ae34-4594-b151-ce7311f36e13.vmdk to [datastore1] devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d/ee5d780e-10a0-4109-93a4-c4258b879f3d.vmdk. 
[ 1839.424607] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Cleaning up location [datastore1] OSTACK_IMG_e82cee42-ae34-4594-b151-ce7311f36e13 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1839.424915] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_e82cee42-ae34-4594-b151-ce7311f36e13 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1839.425311] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b8d2f36b-67b2-4322-91f0-12b451611a9d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.434091] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1839.434091] env[63024]: value = "task-1951082" [ 1839.434091] env[63024]: _type = "Task" [ 1839.434091] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.446015] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951082, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.495440] env[63024]: DEBUG nova.compute.manager [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1839.512473] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951081, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.601020] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bbeb374-afe8-417d-8efe-deb25619141f tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1839.603792] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b888ea39-d97e-4a83-8252-b6817d83596e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.611901] env[63024]: DEBUG oslo_vmware.api [None req-8bbeb374-afe8-417d-8efe-deb25619141f tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1839.611901] env[63024]: value = "task-1951083" [ 1839.611901] env[63024]: _type = "Task" [ 1839.611901] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.624486] env[63024]: DEBUG oslo_vmware.api [None req-8bbeb374-afe8-417d-8efe-deb25619141f tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951083, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.946228] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951082, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.201217} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1839.949928] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1839.950237] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d/ee5d780e-10a0-4109-93a4-c4258b879f3d.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1839.950620] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d/ee5d780e-10a0-4109-93a4-c4258b879f3d.vmdk to [datastore1] 9bf1316e-f1ae-426e-a0a2-d814a2460c4d/9bf1316e-f1ae-426e-a0a2-d814a2460c4d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1839.951212] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9ae5292-f1d0-4eec-9179-7f3580138b93 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.959687] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1839.959687] env[63024]: value = "task-1951084" [ 1839.959687] env[63024]: _type = "Task" [ 1839.959687] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1839.974100] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951084, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1839.977674] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb2bcce-da27-499d-9ce2-aec1c12baf33 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1839.985818] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb2e814c-5625-4b95-90ac-72c486070b5e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.024146] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a529b01e-98df-41c7-8798-5fe634ce4aec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.035022] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43e7912-0b4b-4c69-a5a8-11c52fbdb7f3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.038917] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951081, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.635602} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.039238] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] fe6847e2-a742-4338-983f-698c13aaefde/fe6847e2-a742-4338-983f-698c13aaefde.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1840.039452] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1840.040086] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-05ac2e33-7ced-4612-a795-c0808f423387 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.052531] env[63024]: DEBUG nova.compute.provider_tree [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1840.058831] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1840.058831] env[63024]: value = "task-1951085" [ 1840.058831] env[63024]: _type = "Task" [ 1840.058831] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.070013] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951085, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.121585] env[63024]: DEBUG oslo_vmware.api [None req-8bbeb374-afe8-417d-8efe-deb25619141f tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951083, 'name': PowerOffVM_Task, 'duration_secs': 0.355351} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.121903] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8bbeb374-afe8-417d-8efe-deb25619141f tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1840.122105] env[63024]: DEBUG nova.compute.manager [None req-8bbeb374-afe8-417d-8efe-deb25619141f tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1840.122902] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfcda356-6caa-4bc3-8a09-7c64104a6e31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.469964] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951084, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.528408] env[63024]: DEBUG nova.compute.manager [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1840.555136] env[63024]: DEBUG nova.virt.hardware [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1840.555378] env[63024]: DEBUG nova.virt.hardware [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1840.557091] env[63024]: DEBUG nova.virt.hardware [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1840.557091] env[63024]: DEBUG nova.virt.hardware [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1840.557091] env[63024]: DEBUG nova.virt.hardware [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1840.557091] env[63024]: DEBUG nova.virt.hardware [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1840.557091] env[63024]: DEBUG nova.virt.hardware [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1840.557091] env[63024]: DEBUG nova.virt.hardware [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1840.557091] env[63024]: DEBUG nova.virt.hardware [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1840.557091] env[63024]: DEBUG nova.virt.hardware [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1840.557091] env[63024]: DEBUG nova.virt.hardware [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1840.557694] env[63024]: DEBUG nova.scheduler.client.report [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1840.562158] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b694f17-2181-44d4-99df-ee2cdbff4c4b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.576904] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951085, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06906} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1840.579361] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1840.580259] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24c9db27-1a7a-48fe-9a99-d8d30323aa5c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.587361] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8685b230-17ca-482d-aa82-a804f4c82ca0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.612093] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] fe6847e2-a742-4338-983f-698c13aaefde/fe6847e2-a742-4338-983f-698c13aaefde.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1840.620603] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-90b8db47-85cc-4bbb-a6be-36735b1673dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.637690] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8bbeb374-afe8-417d-8efe-deb25619141f tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.059s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.642885] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1840.642885] env[63024]: value = "task-1951086" [ 1840.642885] env[63024]: _type = "Task" [ 1840.642885] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.653950] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951086, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1840.764443] env[63024]: DEBUG nova.compute.manager [req-36027144-ac82-47fb-995d-1119d755903d req-57df78de-0c30-47c5-94c7-fc5afbd96c5a service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Received event network-vif-plugged-9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1840.764711] env[63024]: DEBUG oslo_concurrency.lockutils [req-36027144-ac82-47fb-995d-1119d755903d req-57df78de-0c30-47c5-94c7-fc5afbd96c5a service nova] Acquiring lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1840.764920] env[63024]: DEBUG oslo_concurrency.lockutils [req-36027144-ac82-47fb-995d-1119d755903d req-57df78de-0c30-47c5-94c7-fc5afbd96c5a service nova] Lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1840.765096] env[63024]: DEBUG oslo_concurrency.lockutils [req-36027144-ac82-47fb-995d-1119d755903d req-57df78de-0c30-47c5-94c7-fc5afbd96c5a service nova] Lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1840.765268] env[63024]: DEBUG nova.compute.manager [req-36027144-ac82-47fb-995d-1119d755903d req-57df78de-0c30-47c5-94c7-fc5afbd96c5a service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] No waiting events found dispatching network-vif-plugged-9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1840.765461] env[63024]: WARNING nova.compute.manager [req-36027144-ac82-47fb-995d-1119d755903d req-57df78de-0c30-47c5-94c7-fc5afbd96c5a service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Received unexpected event network-vif-plugged-9241a9a2-d60f-4c1f-a092-5caa7b738112 for instance with vm_state building and task_state spawning. [ 1840.972762] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951084, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.072419] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.590s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1841.072419] env[63024]: DEBUG nova.compute.manager [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1841.074666] env[63024]: DEBUG nova.network.neutron [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Successfully updated port: 9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1841.074905] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.996s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1841.076375] env[63024]: INFO nova.compute.claims [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1841.155271] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951086, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.310045] env[63024]: DEBUG nova.objects.instance [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lazy-loading 'flavor' on Instance uuid ac60546a-37b2-4d2a-8505-61fe202e2ed0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1841.472560] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951084, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.582340] env[63024]: DEBUG nova.compute.utils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1841.588663] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquiring lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.588663] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquired lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.588663] env[63024]: DEBUG nova.network.neutron [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1841.589163] env[63024]: DEBUG nova.compute.manager [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1841.589387] env[63024]: DEBUG nova.network.neutron [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1841.645595] env[63024]: DEBUG nova.policy [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07af525e7d7f4f9783339f4f5aa58f5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5dcb1fcc9fd945cb9f4477fe1cce3f5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1841.661925] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951086, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.816144] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "refresh_cache-ac60546a-37b2-4d2a-8505-61fe202e2ed0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1841.816369] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquired lock "refresh_cache-ac60546a-37b2-4d2a-8505-61fe202e2ed0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1841.816577] env[63024]: DEBUG nova.network.neutron [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1841.816773] env[63024]: DEBUG nova.objects.instance [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lazy-loading 'info_cache' on Instance uuid ac60546a-37b2-4d2a-8505-61fe202e2ed0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1841.948323] env[63024]: DEBUG nova.network.neutron [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Successfully created port: f78f097c-0df1-4f4f-8941-cf21c2b2ca4b {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1841.974094] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951084, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.093391] env[63024]: DEBUG nova.compute.manager [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1842.147740] env[63024]: DEBUG nova.network.neutron [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1842.170136] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951086, 'name': ReconfigVM_Task, 'duration_secs': 1.091691} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1842.171995] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Reconfigured VM instance instance-00000041 to attach disk [datastore1] fe6847e2-a742-4338-983f-698c13aaefde/fe6847e2-a742-4338-983f-698c13aaefde.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1842.175441] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-28ea3964-a110-4fb7-8691-ee27c8dcccbd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.189685] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1842.189685] env[63024]: value = "task-1951087" [ 1842.189685] env[63024]: _type = "Task" [ 1842.189685] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.202410] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951087, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.321159] env[63024]: DEBUG nova.objects.base [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1842.388215] env[63024]: DEBUG nova.network.neutron [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updating instance_info_cache with network_info: [{"id": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "address": "fa:16:3e:2a:d5:7a", "network": {"id": "ce29ceef-bd62-4366-81a9-4c99d66ac178", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-898882035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4c262cc280074a0bb3b8967f2cbb7c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9241a9a2-d6", "ovs_interfaceid": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1842.476178] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951084, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.673586] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9531ded1-e6ee-4263-ad63-d5e8c597df14 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.683822] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c620a471-13d9-4a3b-8285-35c5bec952c3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.723042] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61409c97-8634-4e16-af62-063d601bb017 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.730203] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951087, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.738772] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-358a9885-89c9-49ac-9b69-435306f620a5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.755917] env[63024]: DEBUG nova.compute.provider_tree [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1842.836332] env[63024]: DEBUG nova.compute.manager [req-ae0a204a-be41-472a-9b8e-27e983a810ce req-e964ff29-f2be-4465-811d-2701b3c3c2af service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Received event network-changed-9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1842.836599] env[63024]: DEBUG nova.compute.manager [req-ae0a204a-be41-472a-9b8e-27e983a810ce req-e964ff29-f2be-4465-811d-2701b3c3c2af service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Refreshing instance network info cache due to event network-changed-9241a9a2-d60f-4c1f-a092-5caa7b738112. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1842.836830] env[63024]: DEBUG oslo_concurrency.lockutils [req-ae0a204a-be41-472a-9b8e-27e983a810ce req-e964ff29-f2be-4465-811d-2701b3c3c2af service nova] Acquiring lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1842.893104] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Releasing lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1842.893466] env[63024]: DEBUG nova.compute.manager [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Instance network_info: |[{"id": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "address": "fa:16:3e:2a:d5:7a", "network": {"id": "ce29ceef-bd62-4366-81a9-4c99d66ac178", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-898882035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4c262cc280074a0bb3b8967f2cbb7c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9241a9a2-d6", "ovs_interfaceid": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1842.893789] env[63024]: DEBUG oslo_concurrency.lockutils [req-ae0a204a-be41-472a-9b8e-27e983a810ce req-e964ff29-f2be-4465-811d-2701b3c3c2af service nova] Acquired lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1842.893962] env[63024]: DEBUG nova.network.neutron [req-ae0a204a-be41-472a-9b8e-27e983a810ce req-e964ff29-f2be-4465-811d-2701b3c3c2af service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Refreshing network info cache for port 9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1842.895304] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:d5:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec3f9e71-839a-429d-b211-d3dfc98ca4f6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'9241a9a2-d60f-4c1f-a092-5caa7b738112', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1842.903071] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Creating folder: Project (4c262cc280074a0bb3b8967f2cbb7c73). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1842.904139] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-50ff869e-627f-41ae-8f95-c78f303405a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.915834] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Created folder: Project (4c262cc280074a0bb3b8967f2cbb7c73) in parent group-v401959. [ 1842.916051] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Creating folder: Instances. Parent ref: group-v402142. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1842.916351] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1a833df1-e5b7-4978-9cb8-934c56f6d470 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.929019] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Created folder: Instances in parent group-v402142. [ 1842.929470] env[63024]: DEBUG oslo.service.loopingcall [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1842.929470] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1842.929650] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1df3cd83-b7a3-489c-b6b1-365a34056d61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.952551] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1842.952551] env[63024]: value = "task-1951090" [ 1842.952551] env[63024]: _type = "Task" [ 1842.952551] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1842.962306] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951090, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1842.972451] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951084, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.104287] env[63024]: DEBUG nova.compute.manager [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1843.109258] env[63024]: DEBUG nova.network.neutron [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Updating instance_info_cache with network_info: [{"id": "4f25b42b-a210-4630-9dc5-b2e92c31b4f5", "address": "fa:16:3e:40:14:8d", "network": {"id": "3ece47b4-8cf7-4658-bdd0-f49089890e34", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-339226542-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a158ec715843423e8f013939e0071c71", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ea0fc1b-0424-46ec-bef5-6b57b7d184d8", "external-id": "nsx-vlan-transportzone-618", "segmentation_id": 618, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f25b42b-a2", "ovs_interfaceid": "4f25b42b-a210-4630-9dc5-b2e92c31b4f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.130883] env[63024]: DEBUG nova.virt.hardware [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1843.131242] env[63024]: DEBUG nova.virt.hardware [None 
req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1843.131422] env[63024]: DEBUG nova.virt.hardware [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1843.131608] env[63024]: DEBUG nova.virt.hardware [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1843.131760] env[63024]: DEBUG nova.virt.hardware [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1843.131907] env[63024]: DEBUG nova.virt.hardware [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1843.132186] env[63024]: DEBUG nova.virt.hardware [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1843.132392] env[63024]: DEBUG nova.virt.hardware [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1843.132613] env[63024]: DEBUG nova.virt.hardware [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1843.132823] env[63024]: DEBUG nova.virt.hardware [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1843.133061] env[63024]: DEBUG nova.virt.hardware [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1843.133969] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20419362-5a33-4946-9970-935caba3f234 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.143067] 
env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2cd4fe-667c-4aa4-90f8-a6e9acd03e8b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.200110] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951087, 'name': Rename_Task, 'duration_secs': 0.554907} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.200380] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1843.200625] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aba45f6b-9435-424e-ad4a-30af11b70029 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.207453] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1843.207453] env[63024]: value = "task-1951091" [ 1843.207453] env[63024]: _type = "Task" [ 1843.207453] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.217499] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951091, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.261297] env[63024]: DEBUG nova.scheduler.client.report [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1843.464456] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951090, 'name': CreateVM_Task, 'duration_secs': 0.391107} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.464456] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1843.464456] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.464456] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.464456] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1843.464456] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bead2fa3-f2a6-4de9-ad0a-7d4e63e764a9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.470550] env[63024]: DEBUG nova.network.neutron [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Successfully updated port: f78f097c-0df1-4f4f-8941-cf21c2b2ca4b {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1843.474577] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1843.474577] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e5d0f0-e1b8-5d0c-d8b3-f73bc79c5f0d" [ 1843.474577] env[63024]: _type = "Task" [ 1843.474577] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.477793] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951084, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.103834} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.480842] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/ee5d780e-10a0-4109-93a4-c4258b879f3d/ee5d780e-10a0-4109-93a4-c4258b879f3d.vmdk to [datastore1] 9bf1316e-f1ae-426e-a0a2-d814a2460c4d/9bf1316e-f1ae-426e-a0a2-d814a2460c4d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1843.481813] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-115fce2d-6a3c-4123-aa71-8fcef8db3fe5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.490092] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e5d0f0-e1b8-5d0c-d8b3-f73bc79c5f0d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.508108] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Reconfiguring VM instance instance-00000040 to attach disk [datastore1] 9bf1316e-f1ae-426e-a0a2-d814a2460c4d/9bf1316e-f1ae-426e-a0a2-d814a2460c4d.vmdk or device None with type streamOptimized {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1843.508613] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6767d491-634a-4b21-94db-f7cc491acbca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.529854] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1843.529854] env[63024]: value = "task-1951092" [ 1843.529854] env[63024]: _type = "Task" [ 1843.529854] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.537466] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951092, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.612378] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Releasing lock "refresh_cache-ac60546a-37b2-4d2a-8505-61fe202e2ed0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.648370] env[63024]: DEBUG nova.network.neutron [req-ae0a204a-be41-472a-9b8e-27e983a810ce req-e964ff29-f2be-4465-811d-2701b3c3c2af service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updated VIF entry in instance network info cache for port 9241a9a2-d60f-4c1f-a092-5caa7b738112. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1843.649043] env[63024]: DEBUG nova.network.neutron [req-ae0a204a-be41-472a-9b8e-27e983a810ce req-e964ff29-f2be-4465-811d-2701b3c3c2af service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updating instance_info_cache with network_info: [{"id": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "address": "fa:16:3e:2a:d5:7a", "network": {"id": "ce29ceef-bd62-4366-81a9-4c99d66ac178", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-898882035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4c262cc280074a0bb3b8967f2cbb7c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9241a9a2-d6", "ovs_interfaceid": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.717430] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951091, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1843.766956] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.692s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1843.767510] env[63024]: DEBUG nova.compute.manager [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1843.770350] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.929s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1843.772092] env[63024]: INFO nova.compute.claims [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1843.973860] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.974640] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquired lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.974640] env[63024]: DEBUG nova.network.neutron [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1843.987876] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e5d0f0-e1b8-5d0c-d8b3-f73bc79c5f0d, 'name': SearchDatastore_Task, 'duration_secs': 0.058288} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1843.988204] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1843.988440] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1843.988668] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1843.988812] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1843.988995] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1843.989264] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a2d90415-d548-4f16-94b3-4338c633380c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.002327] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1844.002540] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1844.003429] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79044a2b-c65b-440e-a6ff-792c69951094 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.008729] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1844.008729] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5274ef1e-66a4-7714-f8e4-e951cc6b52d1" [ 1844.008729] env[63024]: _type = "Task" [ 1844.008729] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.017942] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5274ef1e-66a4-7714-f8e4-e951cc6b52d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.038651] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951092, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.151962] env[63024]: DEBUG oslo_concurrency.lockutils [req-ae0a204a-be41-472a-9b8e-27e983a810ce req-e964ff29-f2be-4465-811d-2701b3c3c2af service nova] Releasing lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1844.217814] env[63024]: DEBUG oslo_vmware.api [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951091, 'name': PowerOnVM_Task, 'duration_secs': 0.559502} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.218097] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1844.218338] env[63024]: INFO nova.compute.manager [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Took 10.67 seconds to spawn the instance on the hypervisor. 
[ 1844.218523] env[63024]: DEBUG nova.compute.manager [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1844.219273] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf86c05-ee19-45e8-82b7-8489ff1eebd9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.279307] env[63024]: DEBUG nova.compute.utils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1844.280828] env[63024]: DEBUG nova.compute.manager [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1844.280995] env[63024]: DEBUG nova.network.neutron [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1844.351737] env[63024]: DEBUG nova.policy [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1ef31aa3582f4b2dab5f9ce2465d5e32', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '674f344eaf784662ac922405620a3ac4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1844.519261] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5274ef1e-66a4-7714-f8e4-e951cc6b52d1, 'name': SearchDatastore_Task, 'duration_secs': 0.013726} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.520125] env[63024]: DEBUG nova.network.neutron [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1844.522604] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a325f838-1c98-4ecf-af22-6a9a6b2e6ebf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.528427] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1844.528427] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527b51e2-f071-dcc3-dea3-f9dff2e6f48d" [ 1844.528427] env[63024]: _type = "Task" [ 1844.528427] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.539148] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527b51e2-f071-dcc3-dea3-f9dff2e6f48d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.542416] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951092, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.618298] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1844.618698] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2b9ebc1e-9cce-4f1e-9bb5-91c4dc300654 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.625497] env[63024]: DEBUG oslo_vmware.api [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1844.625497] env[63024]: value = "task-1951093" [ 1844.625497] env[63024]: _type = "Task" [ 1844.625497] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.633495] env[63024]: DEBUG oslo_vmware.api [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951093, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.660559] env[63024]: DEBUG nova.network.neutron [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Updating instance_info_cache with network_info: [{"id": "f78f097c-0df1-4f4f-8941-cf21c2b2ca4b", "address": "fa:16:3e:df:09:6b", "network": {"id": "83ed1c04-a2e0-4c15-ae35-68e988607ce4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-470202335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb1fcc9fd945cb9f4477fe1cce3f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf78f097c-0d", "ovs_interfaceid": "f78f097c-0df1-4f4f-8941-cf21c2b2ca4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1844.736095] env[63024]: INFO nova.compute.manager [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Took 36.87 seconds to build instance. [ 1844.785534] env[63024]: DEBUG nova.compute.manager [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1844.835511] env[63024]: DEBUG nova.network.neutron [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Successfully created port: dcca960a-05bc-4b0e-b542-36ec3c6c3543 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1844.981852] env[63024]: DEBUG nova.compute.manager [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Received event network-vif-plugged-f78f097c-0df1-4f4f-8941-cf21c2b2ca4b {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1844.982646] env[63024]: DEBUG oslo_concurrency.lockutils [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] Acquiring lock "73db94b8-cfa8-4457-bccb-d4b780edbd93-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1844.982960] env[63024]: DEBUG oslo_concurrency.lockutils [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1844.983223] env[63024]: DEBUG oslo_concurrency.lockutils [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1844.983463] env[63024]: DEBUG nova.compute.manager [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] No waiting events found dispatching network-vif-plugged-f78f097c-0df1-4f4f-8941-cf21c2b2ca4b {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1844.983670] env[63024]: WARNING nova.compute.manager [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Received unexpected event network-vif-plugged-f78f097c-0df1-4f4f-8941-cf21c2b2ca4b for instance with vm_state building and task_state spawning. [ 1844.983879] env[63024]: DEBUG nova.compute.manager [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Received event network-changed-f78f097c-0df1-4f4f-8941-cf21c2b2ca4b {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1844.984095] env[63024]: DEBUG nova.compute.manager [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Refreshing instance network info cache due to event network-changed-f78f097c-0df1-4f4f-8941-cf21c2b2ca4b. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1844.984316] env[63024]: DEBUG oslo_concurrency.lockutils [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] Acquiring lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.047250] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527b51e2-f071-dcc3-dea3-f9dff2e6f48d, 'name': SearchDatastore_Task, 'duration_secs': 0.010265} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.047447] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.048019] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 1448c924-7c61-4c43-a4e7-5a6dd45375cc/1448c924-7c61-4c43-a4e7-5a6dd45375cc.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1845.055813] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e9aaa31-2aa1-4478-b748-bd69e3ae118f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.059521] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951092, 'name': ReconfigVM_Task, 'duration_secs': 1.323872} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.059858] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Reconfigured VM instance instance-00000040 to attach disk [datastore1] 9bf1316e-f1ae-426e-a0a2-d814a2460c4d/9bf1316e-f1ae-426e-a0a2-d814a2460c4d.vmdk or device None with type streamOptimized {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1845.061367] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5affee23-9bbd-44c4-8a2d-da9ccb7634cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.065567] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1845.065567] env[63024]: value = "task-1951094" [ 1845.065567] env[63024]: _type = "Task" [ 1845.065567] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.075096] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1845.075096] env[63024]: value = "task-1951095" [ 1845.075096] env[63024]: _type = "Task" [ 1845.075096] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.082337] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951094, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.087744] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951095, 'name': Rename_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.138730] env[63024]: DEBUG oslo_vmware.api [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951093, 'name': PowerOnVM_Task, 'duration_secs': 0.437262} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.139232] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1845.139972] env[63024]: DEBUG nova.compute.manager [None req-2722ddcf-78d2-437a-bda6-4033bc97edc7 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1845.140258] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0206b205-d16b-4b36-ad2b-e908c044c9a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.165685] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Releasing lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1845.166042] env[63024]: DEBUG nova.compute.manager [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Instance network_info: |[{"id": "f78f097c-0df1-4f4f-8941-cf21c2b2ca4b", "address": "fa:16:3e:df:09:6b", "network": {"id": "83ed1c04-a2e0-4c15-ae35-68e988607ce4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-470202335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb1fcc9fd945cb9f4477fe1cce3f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf78f097c-0d", "ovs_interfaceid": "f78f097c-0df1-4f4f-8941-cf21c2b2ca4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1845.166379] env[63024]: DEBUG oslo_concurrency.lockutils [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] Acquired lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.166565] env[63024]: DEBUG nova.network.neutron [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Refreshing network info 
cache for port f78f097c-0df1-4f4f-8941-cf21c2b2ca4b {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1845.167903] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:09:6b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f78f097c-0df1-4f4f-8941-cf21c2b2ca4b', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1845.179154] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Creating folder: Project (5dcb1fcc9fd945cb9f4477fe1cce3f5b). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1845.179968] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5c652ef0-3c00-4a5d-9051-855b247ef9de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.197343] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Created folder: Project (5dcb1fcc9fd945cb9f4477fe1cce3f5b) in parent group-v401959. [ 1845.197560] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Creating folder: Instances. Parent ref: group-v402145. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1845.197790] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1394c828-1b7d-464f-a7b0-53c7c89776b4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.210594] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Created folder: Instances in parent group-v402145. [ 1845.210594] env[63024]: DEBUG oslo.service.loopingcall [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1845.211939] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1845.212347] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2642b303-6d86-4dfc-8c4b-b25f50b41f1d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.238585] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f23f103b-79dd-4e07-9170-3a35be9d2a45 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "fe6847e2-a742-4338-983f-698c13aaefde" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 88.634s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1845.238781] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1845.238781] env[63024]: value = "task-1951098" [ 1845.238781] env[63024]: _type = "Task" [ 1845.238781] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.249731] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951098, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.317402] env[63024]: DEBUG nova.network.neutron [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Successfully created port: 1ec87f7b-43ea-4f5e-b378-3919f7767904 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1845.444807] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ddc1f7-8d9d-4d5f-9e6d-a689d3f98954 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.454280] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e36394d-fbf5-4ce9-96a0-e6634e856799 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.487510] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5b9f17-e96d-4ac4-9893-ff831f17b585 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.496518] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafa336c-32e8-43da-b1e8-d9e4882d2c7a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.512255] env[63024]: DEBUG nova.compute.provider_tree [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1845.579382] env[63024]: DEBUG oslo_vmware.api [None 
req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951094, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.591864] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951095, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.748833] env[63024]: DEBUG nova.compute.manager [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1845.764022] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951098, 'name': CreateVM_Task, 'duration_secs': 0.399518} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1845.764022] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1845.764285] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1845.764710] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1845.765306] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1845.765794] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f2904f9-6984-4692-a35c-7753a913c234 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.772821] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1845.772821] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dcb78a-2a16-f892-0a2a-f8dc003d8596" [ 1845.772821] env[63024]: _type = "Task" [ 1845.772821] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1845.782536] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dcb78a-2a16-f892-0a2a-f8dc003d8596, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1845.797623] env[63024]: DEBUG nova.compute.manager [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1845.825241] env[63024]: DEBUG nova.virt.hardware [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1845.825960] env[63024]: DEBUG nova.virt.hardware [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1845.825960] env[63024]: DEBUG nova.virt.hardware [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1845.825960] env[63024]: DEBUG nova.virt.hardware [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1845.825960] env[63024]: DEBUG nova.virt.hardware [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1845.826156] env[63024]: DEBUG nova.virt.hardware [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1845.826285] env[63024]: DEBUG nova.virt.hardware [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1845.826437] env[63024]: DEBUG nova.virt.hardware [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1845.826602] env[63024]: DEBUG nova.virt.hardware [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1845.826761] env[63024]: DEBUG nova.virt.hardware [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1845.826930] env[63024]: DEBUG nova.virt.hardware [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1845.827790] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-838074da-7df6-41e3-b311-1c43c477486d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.838142] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4461f85f-3dd9-4854-9526-725becefbf69 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1845.972935] env[63024]: DEBUG nova.network.neutron [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Updated VIF entry in instance network info cache for port f78f097c-0df1-4f4f-8941-cf21c2b2ca4b. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1845.973100] env[63024]: DEBUG nova.network.neutron [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Updating instance_info_cache with network_info: [{"id": "f78f097c-0df1-4f4f-8941-cf21c2b2ca4b", "address": "fa:16:3e:df:09:6b", "network": {"id": "83ed1c04-a2e0-4c15-ae35-68e988607ce4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-470202335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb1fcc9fd945cb9f4477fe1cce3f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf78f097c-0d", "ovs_interfaceid": "f78f097c-0df1-4f4f-8941-cf21c2b2ca4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1846.018042] env[63024]: DEBUG nova.scheduler.client.report [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1846.078019] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951094, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54613} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.078368] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 1448c924-7c61-4c43-a4e7-5a6dd45375cc/1448c924-7c61-4c43-a4e7-5a6dd45375cc.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1846.078661] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1846.078952] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c1b698e0-82f0-4436-9e0e-89c14fcc883f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.090795] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951095, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.092174] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1846.092174] env[63024]: value = "task-1951099" [ 1846.092174] env[63024]: _type = "Task" [ 1846.092174] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.100874] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951099, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.281169] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1846.285086] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dcb78a-2a16-f892-0a2a-f8dc003d8596, 'name': SearchDatastore_Task, 'duration_secs': 0.012283} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.285388] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.285629] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1846.285849] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1846.285992] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1846.286187] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1846.286452] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f2be846d-b7d9-4c97-90ec-8e5e202dde9a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.295162] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1846.295348] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1846.296216] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5e1a56ac-8537-4eb2-84c2-a801fdeaf41f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.301623] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1846.301623] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e5f60a-e4ec-caf8-1d57-fe99e2a8f15b" [ 1846.301623] env[63024]: _type = "Task" [ 1846.301623] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.309598] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e5f60a-e4ec-caf8-1d57-fe99e2a8f15b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.475548] env[63024]: DEBUG oslo_concurrency.lockutils [req-b201f3b6-f346-4ced-aca5-0dbed60b1363 req-361d364f-7b23-4e6f-b965-242dc657dcc7 service nova] Releasing lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1846.522805] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.752s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1846.523357] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1846.525994] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 28.683s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1846.593292] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951095, 'name': Rename_Task, 'duration_secs': 1.150753} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.597970] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1846.598746] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d94ad744-d10b-4510-8308-866bd1175115 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.606257] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951099, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066269} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.607446] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1846.607783] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1846.607783] env[63024]: value = "task-1951100" [ 1846.607783] env[63024]: _type = "Task" [ 1846.607783] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.608458] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b117b838-4ce5-469f-ad3b-8ba4fec5b702 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.618241] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951100, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.635932] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 1448c924-7c61-4c43-a4e7-5a6dd45375cc/1448c924-7c61-4c43-a4e7-5a6dd45375cc.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1846.636606] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc7274fc-128b-4f29-95bf-d2493f146f21 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.658275] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1846.658275] env[63024]: value = "task-1951101" [ 1846.658275] env[63024]: _type = "Task" [ 1846.658275] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.666665] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951101, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1846.813351] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e5f60a-e4ec-caf8-1d57-fe99e2a8f15b, 'name': SearchDatastore_Task, 'duration_secs': 0.008603} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1846.814217] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c77d484d-772f-48e4-8c3f-0756def9143a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1846.820270] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1846.820270] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52335d56-a367-0cbd-9e17-62ba50a8d7a3" [ 1846.820270] env[63024]: _type = "Task" [ 1846.820270] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1846.828736] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52335d56-a367-0cbd-9e17-62ba50a8d7a3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.031168] env[63024]: DEBUG nova.compute.utils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1847.032832] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1847.032893] env[63024]: DEBUG nova.network.neutron [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1847.041625] env[63024]: INFO nova.compute.claims [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1847.060862] env[63024]: DEBUG nova.compute.manager [req-1207f2fd-42a9-4991-bb65-400df8d7a8e2 req-e1382e60-6d26-4699-8c19-eb403fba1c10 service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Received event network-changed-a86b5113-d05e-45ac-bf54-833ea769eae5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1847.060862] env[63024]: DEBUG nova.compute.manager [req-1207f2fd-42a9-4991-bb65-400df8d7a8e2 req-e1382e60-6d26-4699-8c19-eb403fba1c10 service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Refreshing instance network info cache due to event network-changed-a86b5113-d05e-45ac-bf54-833ea769eae5. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1847.060862] env[63024]: DEBUG oslo_concurrency.lockutils [req-1207f2fd-42a9-4991-bb65-400df8d7a8e2 req-e1382e60-6d26-4699-8c19-eb403fba1c10 service nova] Acquiring lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1847.060862] env[63024]: DEBUG oslo_concurrency.lockutils [req-1207f2fd-42a9-4991-bb65-400df8d7a8e2 req-e1382e60-6d26-4699-8c19-eb403fba1c10 service nova] Acquired lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1847.061523] env[63024]: DEBUG nova.network.neutron [req-1207f2fd-42a9-4991-bb65-400df8d7a8e2 req-e1382e60-6d26-4699-8c19-eb403fba1c10 service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Refreshing network info cache for port a86b5113-d05e-45ac-bf54-833ea769eae5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1847.107616] env[63024]: DEBUG nova.policy [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d3f9605a2384a919157a571cd164859', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1166551532c473ca470379b16664513', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1847.125755] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951100, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.168671] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951101, 'name': ReconfigVM_Task, 'duration_secs': 0.45627} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.168954] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 1448c924-7c61-4c43-a4e7-5a6dd45375cc/1448c924-7c61-4c43-a4e7-5a6dd45375cc.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1847.170593] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8343006b-194e-4523-baf1-5e7e4b3de7e5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.173090] env[63024]: DEBUG nova.compute.manager [req-43a089c3-ee79-4f18-9e45-8e61f66a6f17 req-a62bdcae-3761-45c3-b931-243be0c1f236 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Received event network-vif-plugged-dcca960a-05bc-4b0e-b542-36ec3c6c3543 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1847.173504] env[63024]: DEBUG oslo_concurrency.lockutils [req-43a089c3-ee79-4f18-9e45-8e61f66a6f17 req-a62bdcae-3761-45c3-b931-243be0c1f236 service nova] Acquiring lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1847.173580] env[63024]: DEBUG oslo_concurrency.lockutils [req-43a089c3-ee79-4f18-9e45-8e61f66a6f17 req-a62bdcae-3761-45c3-b931-243be0c1f236 service nova] Lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1847.173715] env[63024]: DEBUG oslo_concurrency.lockutils [req-43a089c3-ee79-4f18-9e45-8e61f66a6f17 req-a62bdcae-3761-45c3-b931-243be0c1f236 service nova] Lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1847.173882] env[63024]: DEBUG nova.compute.manager [req-43a089c3-ee79-4f18-9e45-8e61f66a6f17 req-a62bdcae-3761-45c3-b931-243be0c1f236 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] No waiting events found dispatching network-vif-plugged-dcca960a-05bc-4b0e-b542-36ec3c6c3543 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1847.174056] env[63024]: WARNING nova.compute.manager [req-43a089c3-ee79-4f18-9e45-8e61f66a6f17 req-a62bdcae-3761-45c3-b931-243be0c1f236 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Received unexpected event network-vif-plugged-dcca960a-05bc-4b0e-b542-36ec3c6c3543 for instance with vm_state building and task_state spawning. 
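(Editor's note on the event records above: the service repeatedly acquires a per-instance "-events" lock, looks for a registered waiter for an external event such as network-vif-plugged-<port>, and logs "Received unexpected event ..." when no one is waiting. The sketch below is a simplified, hypothetical analog of that dispatch pattern, not nova.compute.manager's implementation; the class and method names are invented for illustration.)

# Illustrative sketch of the external-event dispatch pattern: register a waiter
# for a named per-instance event, then either wake it when the notification
# arrives or report the event as unexpected. Hypothetical names; not Nova code.
import threading


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_id, event_name) -> threading.Event

    def prepare(self, instance_id, event_name):
        """Register interest in an event before triggering the external action."""
        waiter = threading.Event()
        with self._lock:  # analogous to the "<instance>-events" lock in the log
            self._waiters[(instance_id, event_name)] = waiter
        return waiter

    def dispatch(self, instance_id, event_name):
        """Deliver an externally received event; True if a waiter was signalled."""
        with self._lock:
            waiter = self._waiters.pop((instance_id, event_name), None)
        if waiter is None:
            print('Received unexpected event %s for instance %s'
                  % (event_name, instance_id))
            return False
        waiter.set()
        return True


if __name__ == '__main__':
    events = InstanceEvents()
    w = events.prepare('73db94b8', 'network-vif-plugged-f78f097c')
    events.dispatch('73db94b8', 'network-vif-plugged-f78f097c')  # wakes waiter
    print('waiter signalled:', w.is_set())
    events.dispatch('73db94b8', 'network-changed-f78f097c')      # unexpected
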
[ 1847.179183] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1847.179183] env[63024]: value = "task-1951102" [ 1847.179183] env[63024]: _type = "Task" [ 1847.179183] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.187747] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951102, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.280416] env[63024]: DEBUG nova.network.neutron [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Successfully updated port: dcca960a-05bc-4b0e-b542-36ec3c6c3543 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1847.332902] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52335d56-a367-0cbd-9e17-62ba50a8d7a3, 'name': SearchDatastore_Task, 'duration_secs': 0.011075} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.333194] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1847.333464] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 73db94b8-cfa8-4457-bccb-d4b780edbd93/73db94b8-cfa8-4457-bccb-d4b780edbd93.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1847.333723] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-43029fd8-ef9c-4286-8e0b-d98fd963095e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.340908] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1847.340908] env[63024]: value = "task-1951103" [ 1847.340908] env[63024]: _type = "Task" [ 1847.340908] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.351902] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951103, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.393871] env[63024]: DEBUG nova.network.neutron [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Successfully created port: f660bb87-9d24-492b-adaf-d1471c95249a {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1847.547211] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1847.552783] env[63024]: INFO nova.compute.resource_tracker [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating resource usage from migration 787068cf-2789-4013-8b27-8a10a4f14022 [ 1847.622730] env[63024]: DEBUG oslo_vmware.api [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951100, 'name': PowerOnVM_Task, 'duration_secs': 0.653699} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.623033] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1847.625530] env[63024]: INFO nova.compute.manager [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Took 18.76 seconds to spawn the instance on the hypervisor. [ 1847.625732] env[63024]: DEBUG nova.compute.manager [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1847.626570] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7280c48-99ee-47d5-ba1d-01c493a4b12a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.697098] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951102, 'name': Rename_Task, 'duration_secs': 0.191248} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1847.697420] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1847.697647] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-230edb46-fb6b-4c27-8291-39e0e1a694c9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1847.707888] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1847.707888] env[63024]: value = "task-1951104" [ 1847.707888] env[63024]: _type = "Task" [ 1847.707888] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1847.723355] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951104, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1847.853102] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951103, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.148710] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c38ee20-f594-421f-9d0a-3a71b68e7c4d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.152532] env[63024]: INFO nova.compute.manager [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Took 42.81 seconds to build instance. 
[ 1848.156901] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "c12774e4-77d1-4001-8d5d-0240dfed4ead" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.156901] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "c12774e4-77d1-4001-8d5d-0240dfed4ead" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.156901] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "c12774e4-77d1-4001-8d5d-0240dfed4ead-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.156901] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "c12774e4-77d1-4001-8d5d-0240dfed4ead-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.156901] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "c12774e4-77d1-4001-8d5d-0240dfed4ead-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.158050] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c24340b-ae6a-401f-b231-e48c282477c5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.161611] env[63024]: INFO nova.compute.manager [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Terminating instance [ 1848.191390] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae22469b-17f1-4e52-8a6e-28d1d89da3d4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.200063] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d17b1598-c9c2-4274-b329-902c86de05d1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.214668] env[63024]: DEBUG nova.compute.provider_tree [None req-0270d635-53cb-4676-9f81-c065279417cb 
tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1848.223185] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951104, 'name': PowerOnVM_Task} progress is 78%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.271555] env[63024]: DEBUG nova.network.neutron [req-1207f2fd-42a9-4991-bb65-400df8d7a8e2 req-e1382e60-6d26-4699-8c19-eb403fba1c10 service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updated VIF entry in instance network info cache for port a86b5113-d05e-45ac-bf54-833ea769eae5. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1848.271945] env[63024]: DEBUG nova.network.neutron [req-1207f2fd-42a9-4991-bb65-400df8d7a8e2 req-e1382e60-6d26-4699-8c19-eb403fba1c10 service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance_info_cache with network_info: [{"id": "a86b5113-d05e-45ac-bf54-833ea769eae5", "address": "fa:16:3e:7e:0d:a2", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa86b5113-d0", "ovs_interfaceid": "a86b5113-d05e-45ac-bf54-833ea769eae5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1848.351857] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951103, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.642416} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.352189] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 73db94b8-cfa8-4457-bccb-d4b780edbd93/73db94b8-cfa8-4457-bccb-d4b780edbd93.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1848.352444] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1848.352718] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27a3f5b5-fb8b-4f83-8d92-d2b85861afaf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.359135] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1848.359135] env[63024]: value = "task-1951105" [ 1848.359135] env[63024]: _type = "Task" [ 1848.359135] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.370197] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951105, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.563422] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1848.590460] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1848.590699] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1848.590852] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1848.591034] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1848.591203] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1848.591355] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1848.591557] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1848.591710] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1848.591871] env[63024]: DEBUG 
nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1848.592039] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1848.592205] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1848.593023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae2fa9f-14cf-4c97-ae0a-fa06b34d5d24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.601026] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bafb011-4750-4b96-a59e-6b63db1d9fff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.658736] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e03c94db-e874-4173-aab9-17eeaab60640 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "9bf1316e-f1ae-426e-a0a2-d814a2460c4d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.856s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.668020] env[63024]: DEBUG nova.compute.manager [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1848.668020] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1848.668020] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83fac07-50fe-43c7-be8a-bac92dc2e20c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.675819] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1848.676310] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-09610bfb-1720-49ee-ba72-028fbe20a10c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.687069] env[63024]: DEBUG oslo_vmware.api [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1848.687069] env[63024]: value = "task-1951106" [ 1848.687069] env[63024]: _type = "Task" [ 1848.687069] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.694770] env[63024]: DEBUG oslo_vmware.api [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951106, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.726158] env[63024]: DEBUG nova.scheduler.client.report [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1848.736416] env[63024]: DEBUG oslo_vmware.api [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951104, 'name': PowerOnVM_Task, 'duration_secs': 1.010344} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.736416] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1848.736416] env[63024]: INFO nova.compute.manager [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Took 8.21 seconds to spawn the instance on the hypervisor. [ 1848.736416] env[63024]: DEBUG nova.compute.manager [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1848.736926] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1af3c5ef-4065-4d21-8041-002f0ba26873 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.774899] env[63024]: DEBUG oslo_concurrency.lockutils [req-1207f2fd-42a9-4991-bb65-400df8d7a8e2 req-e1382e60-6d26-4699-8c19-eb403fba1c10 service nova] Releasing lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1848.872099] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951105, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070861} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1848.872099] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1848.872749] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e2a6c3b-dda2-420d-b488-b4079f9c3b28 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.901021] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] 73db94b8-cfa8-4457-bccb-d4b780edbd93/73db94b8-cfa8-4457-bccb-d4b780edbd93.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1848.901021] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fcbe5f9-a50f-44f5-87d5-f61be3506a7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1848.921351] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1848.921351] env[63024]: value = "task-1951107" [ 1848.921351] env[63024]: _type = "Task" [ 1848.921351] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1848.929066] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951107, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1848.992927] env[63024]: DEBUG oslo_concurrency.lockutils [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "9bf1316e-f1ae-426e-a0a2-d814a2460c4d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.993231] env[63024]: DEBUG oslo_concurrency.lockutils [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "9bf1316e-f1ae-426e-a0a2-d814a2460c4d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.993448] env[63024]: DEBUG oslo_concurrency.lockutils [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "9bf1316e-f1ae-426e-a0a2-d814a2460c4d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1848.993630] env[63024]: DEBUG oslo_concurrency.lockutils [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "9bf1316e-f1ae-426e-a0a2-d814a2460c4d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1848.993798] env[63024]: DEBUG oslo_concurrency.lockutils [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "9bf1316e-f1ae-426e-a0a2-d814a2460c4d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1848.995934] env[63024]: INFO nova.compute.manager [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Terminating instance [ 1849.018977] env[63024]: DEBUG nova.network.neutron [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Successfully updated port: f660bb87-9d24-492b-adaf-d1471c95249a {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1849.097187] env[63024]: DEBUG nova.compute.manager [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Received event network-vif-plugged-f660bb87-9d24-492b-adaf-d1471c95249a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1849.097415] env[63024]: DEBUG oslo_concurrency.lockutils [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] Acquiring lock "52c17abc-78f0-417b-8675-e8d62bc8baa3-events" 
by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.097619] env[63024]: DEBUG oslo_concurrency.lockutils [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] Lock "52c17abc-78f0-417b-8675-e8d62bc8baa3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.097783] env[63024]: DEBUG oslo_concurrency.lockutils [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] Lock "52c17abc-78f0-417b-8675-e8d62bc8baa3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.097961] env[63024]: DEBUG nova.compute.manager [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] No waiting events found dispatching network-vif-plugged-f660bb87-9d24-492b-adaf-d1471c95249a {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1849.098166] env[63024]: WARNING nova.compute.manager [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Received unexpected event network-vif-plugged-f660bb87-9d24-492b-adaf-d1471c95249a for instance with vm_state building and task_state spawning. [ 1849.098269] env[63024]: DEBUG nova.compute.manager [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Received event network-changed-f660bb87-9d24-492b-adaf-d1471c95249a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1849.098419] env[63024]: DEBUG nova.compute.manager [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Refreshing instance network info cache due to event network-changed-f660bb87-9d24-492b-adaf-d1471c95249a. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1849.098595] env[63024]: DEBUG oslo_concurrency.lockutils [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] Acquiring lock "refresh_cache-52c17abc-78f0-417b-8675-e8d62bc8baa3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.098726] env[63024]: DEBUG oslo_concurrency.lockutils [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] Acquired lock "refresh_cache-52c17abc-78f0-417b-8675-e8d62bc8baa3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1849.098875] env[63024]: DEBUG nova.network.neutron [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Refreshing network info cache for port f660bb87-9d24-492b-adaf-d1471c95249a {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1849.162044] env[63024]: DEBUG nova.compute.manager [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1849.196195] env[63024]: DEBUG oslo_vmware.api [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951106, 'name': PowerOffVM_Task, 'duration_secs': 0.192271} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.196643] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1849.196854] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1849.197166] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8312ba1-ce61-4775-8f56-b08a27347926 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.200810] env[63024]: DEBUG nova.compute.manager [req-e0e47022-c7fb-4c16-a657-5a5435215b70 req-4a94024f-e61a-461c-89a6-842f50569290 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Received event network-changed-dcca960a-05bc-4b0e-b542-36ec3c6c3543 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1849.200998] env[63024]: DEBUG nova.compute.manager [req-e0e47022-c7fb-4c16-a657-5a5435215b70 req-4a94024f-e61a-461c-89a6-842f50569290 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Refreshing instance network info cache due to event network-changed-dcca960a-05bc-4b0e-b542-36ec3c6c3543. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1849.201256] env[63024]: DEBUG oslo_concurrency.lockutils [req-e0e47022-c7fb-4c16-a657-5a5435215b70 req-4a94024f-e61a-461c-89a6-842f50569290 service nova] Acquiring lock "refresh_cache-e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.201399] env[63024]: DEBUG oslo_concurrency.lockutils [req-e0e47022-c7fb-4c16-a657-5a5435215b70 req-4a94024f-e61a-461c-89a6-842f50569290 service nova] Acquired lock "refresh_cache-e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1849.201845] env[63024]: DEBUG nova.network.neutron [req-e0e47022-c7fb-4c16-a657-5a5435215b70 req-4a94024f-e61a-461c-89a6-842f50569290 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Refreshing network info cache for port dcca960a-05bc-4b0e-b542-36ec3c6c3543 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1849.228574] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.702s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.228690] env[63024]: INFO nova.compute.manager [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Migrating [ 1849.234909] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.695s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1849.235148] env[63024]: DEBUG nova.objects.instance [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lazy-loading 'resources' on Instance uuid 81f96b5a-b878-4e6c-9683-00528a4d5650 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1849.258247] env[63024]: INFO nova.compute.manager [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Took 35.79 seconds to build instance. 
[ 1849.337533] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1849.337767] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1849.337945] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Deleting the datastore file [datastore1] c12774e4-77d1-4001-8d5d-0240dfed4ead {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1849.338225] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-978374e6-f1e5-45c5-bd2c-4a012aad3bb7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.344748] env[63024]: DEBUG oslo_vmware.api [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1849.344748] env[63024]: value = "task-1951109" [ 1849.344748] env[63024]: _type = "Task" [ 1849.344748] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.352787] env[63024]: DEBUG oslo_vmware.api [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951109, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.430346] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951107, 'name': ReconfigVM_Task, 'duration_secs': 0.274429} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.430659] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Reconfigured VM instance instance-00000043 to attach disk [datastore1] 73db94b8-cfa8-4457-bccb-d4b780edbd93/73db94b8-cfa8-4457-bccb-d4b780edbd93.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1849.431368] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ebf4a181-97ff-400a-94c0-57af99e10fdd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.437764] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1849.437764] env[63024]: value = "task-1951110" [ 1849.437764] env[63024]: _type = "Task" [ 1849.437764] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.446272] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951110, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.499617] env[63024]: DEBUG nova.compute.manager [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1849.499908] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1849.500955] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b849527b-9f6f-4e2c-b946-43aea45b346a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.509160] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1849.509437] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6a82c719-1114-4619-bc51-d0c34e9eedbc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.515258] env[63024]: DEBUG oslo_vmware.api [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1849.515258] env[63024]: value = "task-1951111" [ 1849.515258] env[63024]: _type = "Task" [ 1849.515258] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1849.523459] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "refresh_cache-52c17abc-78f0-417b-8675-e8d62bc8baa3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.523739] env[63024]: DEBUG oslo_vmware.api [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951111, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1849.638968] env[63024]: DEBUG nova.network.neutron [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1849.688606] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1849.734836] env[63024]: DEBUG nova.network.neutron [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1849.744703] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1849.744882] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1849.745081] env[63024]: DEBUG nova.network.neutron [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1849.764543] env[63024]: DEBUG oslo_concurrency.lockutils [None req-619938dd-6fa0-4163-9b49-6e2cc67b6a4c tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.979s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1849.788116] env[63024]: DEBUG nova.network.neutron [req-e0e47022-c7fb-4c16-a657-5a5435215b70 req-4a94024f-e61a-461c-89a6-842f50569290 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1849.861291] env[63024]: DEBUG oslo_vmware.api [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951109, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140995} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1849.861291] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1849.861291] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1849.861291] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1849.861291] env[63024]: INFO nova.compute.manager [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1849.862958] env[63024]: DEBUG oslo.service.loopingcall [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1849.863324] env[63024]: DEBUG nova.compute.manager [-] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1849.865246] env[63024]: DEBUG nova.network.neutron [-] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1849.950779] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951110, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.027169] env[63024]: DEBUG oslo_vmware.api [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951111, 'name': PowerOffVM_Task, 'duration_secs': 0.25448} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.029982] env[63024]: DEBUG nova.network.neutron [req-e0e47022-c7fb-4c16-a657-5a5435215b70 req-4a94024f-e61a-461c-89a6-842f50569290 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.031972] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1850.032310] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1850.033082] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3decb3b3-ae40-4b59-ad0a-b6e68241d8b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.168380] env[63024]: DEBUG nova.network.neutron [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Successfully updated port: 1ec87f7b-43ea-4f5e-b378-3919f7767904 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1850.237722] env[63024]: DEBUG oslo_concurrency.lockutils [req-e17274f0-8858-42ad-814b-99cdd40195ef req-7b1992b8-2043-43f2-9f76-e54f86be10da service nova] Releasing lock "refresh_cache-52c17abc-78f0-417b-8675-e8d62bc8baa3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1850.238134] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired lock "refresh_cache-52c17abc-78f0-417b-8675-e8d62bc8baa3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.238293] env[63024]: DEBUG nova.network.neutron [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1850.268020] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0b8a3f4-b4fc-409a-bb90-1575e4109eeb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.272643] env[63024]: DEBUG nova.compute.manager [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1850.276986] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1850.276986] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1850.276986] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleting the datastore file [datastore1] 9bf1316e-f1ae-426e-a0a2-d814a2460c4d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1850.280619] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d87f04b2-d829-4af2-8837-bb2d13ecd36f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.285688] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318bf913-29c7-40db-8d6e-fa8d0b3dc1bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.290538] env[63024]: DEBUG oslo_vmware.api [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1850.290538] env[63024]: value = "task-1951113" [ 1850.290538] env[63024]: _type = "Task" [ 1850.290538] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.321429] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c0e6e1-bb8b-47d8-b2ca-906b3f08efd2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.327007] env[63024]: DEBUG oslo_vmware.api [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951113, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.331878] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1111dc01-7bbc-4900-9811-641303d2d85c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.345704] env[63024]: DEBUG nova.compute.provider_tree [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1850.452788] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951110, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1850.533857] env[63024]: DEBUG oslo_concurrency.lockutils [req-e0e47022-c7fb-4c16-a657-5a5435215b70 req-4a94024f-e61a-461c-89a6-842f50569290 service nova] Releasing lock "refresh_cache-e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1850.543138] env[63024]: DEBUG nova.network.neutron [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance_info_cache with network_info: [{"id": "611e1e79-ffb8-4ba9-8718-b57360eaa492", "address": "fa:16:3e:f8:21:2c", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap611e1e79-ff", "ovs_interfaceid": "611e1e79-ffb8-4ba9-8718-b57360eaa492", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.674514] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "refresh_cache-e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.674630] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 
tempest-ServersTestMultiNic-1858159583-project-member] Acquired lock "refresh_cache-e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.674991] env[63024]: DEBUG nova.network.neutron [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1850.676183] env[63024]: DEBUG nova.network.neutron [-] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.767354] env[63024]: INFO nova.compute.manager [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Rescuing [ 1850.767354] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquiring lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1850.767354] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquired lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1850.767354] env[63024]: DEBUG nova.network.neutron [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1850.781736] env[63024]: DEBUG nova.network.neutron [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1850.799469] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1850.802926] env[63024]: DEBUG oslo_vmware.api [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951113, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167396} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.803428] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1850.803615] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1850.803781] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1850.803942] env[63024]: INFO nova.compute.manager [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Took 1.30 seconds to destroy the instance on the hypervisor. [ 1850.804195] env[63024]: DEBUG oslo.service.loopingcall [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1850.804662] env[63024]: DEBUG nova.compute.manager [-] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1850.804662] env[63024]: DEBUG nova.network.neutron [-] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1850.848779] env[63024]: DEBUG nova.scheduler.client.report [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1850.919918] env[63024]: DEBUG nova.network.neutron [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Updating instance_info_cache with network_info: [{"id": "f660bb87-9d24-492b-adaf-d1471c95249a", "address": "fa:16:3e:2b:d9:d3", "network": {"id": "9e97434d-c36c-478d-a559-df5b5d8bcd77", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-630350621-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1166551532c473ca470379b16664513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf660bb87-9d", "ovs_interfaceid": "f660bb87-9d24-492b-adaf-d1471c95249a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1850.949295] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951110, 'name': Rename_Task, 'duration_secs': 1.139887} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1850.949586] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1850.949827] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1cdd91af-2022-4e6f-b4c7-0aae816f3d7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1850.956251] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1850.956251] env[63024]: value = "task-1951114" [ 1850.956251] env[63024]: _type = "Task" [ 1850.956251] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1850.963906] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951114, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.045758] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.170907] env[63024]: DEBUG nova.compute.manager [req-bfbe9dae-a81c-4351-aba6-fa75fc7ec6ab req-09c6c85f-6ebd-4983-bbd0-df236de4c9d4 service nova] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Received event network-vif-deleted-c38c5b06-5451-45cd-9156-009ba98e5596 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1851.171015] env[63024]: DEBUG nova.compute.manager [req-bfbe9dae-a81c-4351-aba6-fa75fc7ec6ab req-09c6c85f-6ebd-4983-bbd0-df236de4c9d4 service nova] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Received event network-vif-deleted-3e52f5fe-2e1c-4296-977e-cdbb6a36291e {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1851.171250] env[63024]: INFO nova.compute.manager [req-bfbe9dae-a81c-4351-aba6-fa75fc7ec6ab req-09c6c85f-6ebd-4983-bbd0-df236de4c9d4 service nova] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Neutron deleted interface 3e52f5fe-2e1c-4296-977e-cdbb6a36291e; detaching it from the instance and deleting it from the info cache [ 1851.171413] env[63024]: DEBUG nova.network.neutron [req-bfbe9dae-a81c-4351-aba6-fa75fc7ec6ab req-09c6c85f-6ebd-4983-bbd0-df236de4c9d4 service nova] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.182803] env[63024]: INFO nova.compute.manager [-] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Took 1.32 seconds to deallocate network for instance. [ 1851.216729] env[63024]: DEBUG nova.network.neutron [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1851.232689] env[63024]: DEBUG nova.compute.manager [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Received event network-vif-plugged-1ec87f7b-43ea-4f5e-b378-3919f7767904 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1851.232949] env[63024]: DEBUG oslo_concurrency.lockutils [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] Acquiring lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.233196] env[63024]: DEBUG oslo_concurrency.lockutils [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] Lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.233375] env[63024]: DEBUG oslo_concurrency.lockutils [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] Lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.233576] env[63024]: DEBUG nova.compute.manager [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] No waiting events found dispatching network-vif-plugged-1ec87f7b-43ea-4f5e-b378-3919f7767904 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1851.233761] env[63024]: WARNING nova.compute.manager [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Received unexpected event network-vif-plugged-1ec87f7b-43ea-4f5e-b378-3919f7767904 for instance with vm_state building and task_state spawning. [ 1851.234000] env[63024]: DEBUG nova.compute.manager [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Received event network-changed-1ec87f7b-43ea-4f5e-b378-3919f7767904 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1851.234116] env[63024]: DEBUG nova.compute.manager [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Refreshing instance network info cache due to event network-changed-1ec87f7b-43ea-4f5e-b378-3919f7767904. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1851.234279] env[63024]: DEBUG oslo_concurrency.lockutils [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] Acquiring lock "refresh_cache-e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1851.354451] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.119s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1851.357169] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.713s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1851.358713] env[63024]: INFO nova.compute.claims [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1851.375057] env[63024]: INFO nova.scheduler.client.report [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Deleted allocations for instance 81f96b5a-b878-4e6c-9683-00528a4d5650 [ 1851.424452] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Releasing lock "refresh_cache-52c17abc-78f0-417b-8675-e8d62bc8baa3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1851.424875] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Instance network_info: |[{"id": "f660bb87-9d24-492b-adaf-d1471c95249a", "address": "fa:16:3e:2b:d9:d3", "network": {"id": "9e97434d-c36c-478d-a559-df5b5d8bcd77", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-630350621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1166551532c473ca470379b16664513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf660bb87-9d", "ovs_interfaceid": "f660bb87-9d24-492b-adaf-d1471c95249a", 
"qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1851.425461] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:d9:d3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4055505f-97ab-400b-969c-43d99b38fd48', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f660bb87-9d24-492b-adaf-d1471c95249a', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1851.434778] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Creating folder: Project (a1166551532c473ca470379b16664513). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1851.436192] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6aa49e29-3458-4d30-90c1-9fac38560685 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.450538] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Created folder: Project (a1166551532c473ca470379b16664513) in parent group-v401959. [ 1851.450759] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Creating folder: Instances. Parent ref: group-v402148. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1851.450988] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3820fdcf-50c0-4494-abbd-ef95b7db11d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.464539] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Created folder: Instances in parent group-v402148. [ 1851.465190] env[63024]: DEBUG oslo.service.loopingcall [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1851.467836] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1851.467836] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8018f594-fe05-4b0e-9c1a-542b8fce5928 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.486573] env[63024]: DEBUG oslo_vmware.api [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951114, 'name': PowerOnVM_Task, 'duration_secs': 0.488707} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1851.487236] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1851.487477] env[63024]: INFO nova.compute.manager [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Took 8.38 seconds to spawn the instance on the hypervisor. [ 1851.487661] env[63024]: DEBUG nova.compute.manager [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1851.488464] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be1e5ed-216a-4bbd-8154-ee596dbb1655 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.492245] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1851.492245] env[63024]: value = "task-1951117" [ 1851.492245] env[63024]: _type = "Task" [ 1851.492245] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1851.507290] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951117, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1851.542578] env[63024]: DEBUG nova.network.neutron [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Updating instance_info_cache with network_info: [{"id": "dcca960a-05bc-4b0e-b542-36ec3c6c3543", "address": "fa:16:3e:b6:57:00", "network": {"id": "79226672-68e0-4c51-b157-fd2bec47c28d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-771192213", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcca960a-05", "ovs_interfaceid": "dcca960a-05bc-4b0e-b542-36ec3c6c3543", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ec87f7b-43ea-4f5e-b378-3919f7767904", "address": "fa:16:3e:b7:2e:ea", "network": {"id": "bf1e4c1a-02c4-4b0c-a575-c0d58ce27323", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-361265617", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9297313e-7c50-4873-93d3-67284929163a", "external-id": "nsx-vlan-transportzone-620", "segmentation_id": 620, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ec87f7b-43", "ovs_interfaceid": "1ec87f7b-43ea-4f5e-b378-3919f7767904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.545829] env[63024]: DEBUG nova.network.neutron [-] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.677025] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0f5ddc42-4493-4648-b9d7-cc3fb543268c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.678516] env[63024]: DEBUG nova.network.neutron [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 
tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updating instance_info_cache with network_info: [{"id": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "address": "fa:16:3e:2a:d5:7a", "network": {"id": "ce29ceef-bd62-4366-81a9-4c99d66ac178", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-898882035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4c262cc280074a0bb3b8967f2cbb7c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9241a9a2-d6", "ovs_interfaceid": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1851.685619] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e94191-bf68-4646-a206-ae8b5af0178f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1851.700113] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1851.730249] env[63024]: DEBUG nova.compute.manager [req-bfbe9dae-a81c-4351-aba6-fa75fc7ec6ab req-09c6c85f-6ebd-4983-bbd0-df236de4c9d4 service nova] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Detach interface failed, port_id=3e52f5fe-2e1c-4296-977e-cdbb6a36291e, reason: Instance 9bf1316e-f1ae-426e-a0a2-d814a2460c4d could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1851.889727] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9141bc3-f30c-481e-b147-d3f10e57b8cd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "81f96b5a-b878-4e6c-9683-00528a4d5650" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.791s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.007291] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951117, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.011320] env[63024]: INFO nova.compute.manager [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Took 36.39 seconds to build instance. 
[ 1852.045873] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Releasing lock "refresh_cache-e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.046298] env[63024]: DEBUG nova.compute.manager [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Instance network_info: |[{"id": "dcca960a-05bc-4b0e-b542-36ec3c6c3543", "address": "fa:16:3e:b6:57:00", "network": {"id": "79226672-68e0-4c51-b157-fd2bec47c28d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-771192213", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcca960a-05", "ovs_interfaceid": "dcca960a-05bc-4b0e-b542-36ec3c6c3543", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ec87f7b-43ea-4f5e-b378-3919f7767904", "address": "fa:16:3e:b7:2e:ea", "network": {"id": "bf1e4c1a-02c4-4b0c-a575-c0d58ce27323", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-361265617", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9297313e-7c50-4873-93d3-67284929163a", "external-id": "nsx-vlan-transportzone-620", "segmentation_id": 620, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ec87f7b-43", "ovs_interfaceid": "1ec87f7b-43ea-4f5e-b378-3919f7767904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1852.046630] env[63024]: DEBUG oslo_concurrency.lockutils [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] Acquired lock "refresh_cache-e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.046804] env[63024]: DEBUG nova.network.neutron [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] 
[instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Refreshing network info cache for port 1ec87f7b-43ea-4f5e-b378-3919f7767904 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1852.047993] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:57:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f880ac2e-d532-4f54-87bb-998a8d1bca78', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dcca960a-05bc-4b0e-b542-36ec3c6c3543', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:b7:2e:ea', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9297313e-7c50-4873-93d3-67284929163a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ec87f7b-43ea-4f5e-b378-3919f7767904', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1852.057277] env[63024]: DEBUG oslo.service.loopingcall [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1852.058399] env[63024]: INFO nova.compute.manager [-] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Took 1.25 seconds to deallocate network for instance. [ 1852.060842] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1852.062669] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f66a45a-c352-41d0-a53a-0bd45249613d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.089065] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1852.089065] env[63024]: value = "task-1951118" [ 1852.089065] env[63024]: _type = "Task" [ 1852.089065] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.098198] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951118, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.181851] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Releasing lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.505839] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951117, 'name': CreateVM_Task, 'duration_secs': 0.605481} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.505997] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1852.506706] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.506878] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.507239] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1852.507506] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-088a946c-4479-4e03-8b96-bd7b46d72a18 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.512203] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1852.512203] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b3afea-1cca-f24f-a94b-a466786ffcd1" [ 1852.512203] env[63024]: _type = "Task" [ 1852.512203] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.518296] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbd12695-95af-4ca7-8b73-3ac439c8b1ff tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.759s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1852.524987] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b3afea-1cca-f24f-a94b-a466786ffcd1, 'name': SearchDatastore_Task, 'duration_secs': 0.008882} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.525665] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1852.525957] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1852.526642] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.526642] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.526642] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1852.526814] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e46ea454-cf05-4a13-b093-70b28592ff63 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.535413] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1852.535669] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1852.536546] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1902a16d-5298-458f-a64a-68707f52a60b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.541946] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1852.541946] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52416d1d-d1ff-fbd0-2f14-156d5fe2c7cf" [ 1852.541946] env[63024]: _type = "Task" [ 1852.541946] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.552082] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52416d1d-d1ff-fbd0-2f14-156d5fe2c7cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.564531] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44eef9b8-ef66-48f7-9f13-c603dc017e45 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.583962] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance '1709d916-d0c4-4706-b41b-8b0ed25f3331' progress to 0 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1852.590709] env[63024]: DEBUG oslo_concurrency.lockutils [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1852.599354] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951118, 'name': CreateVM_Task, 'duration_secs': 0.451527} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1852.599518] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1852.602682] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1852.602682] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1852.602976] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1852.603675] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35c6a7a3-2be0-4bd5-b1ba-5583f0eff707 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.608476] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1852.608476] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d34b5f-f9e5-f212-69fd-2ba8f5a74e91" [ 1852.608476] env[63024]: _type = "Task" [ 1852.608476] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1852.619218] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d34b5f-f9e5-f212-69fd-2ba8f5a74e91, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1852.896696] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9b968a-df0d-44eb-9aa4-f54b5eb053b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.906944] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96ef6310-3ad1-4a5a-ab30-f9a118b8842e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.914591] env[63024]: DEBUG nova.network.neutron [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Updated VIF entry in instance network info cache for port 1ec87f7b-43ea-4f5e-b378-3919f7767904. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1852.914988] env[63024]: DEBUG nova.network.neutron [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Updating instance_info_cache with network_info: [{"id": "dcca960a-05bc-4b0e-b542-36ec3c6c3543", "address": "fa:16:3e:b6:57:00", "network": {"id": "79226672-68e0-4c51-b157-fd2bec47c28d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-771192213", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcca960a-05", "ovs_interfaceid": "dcca960a-05bc-4b0e-b542-36ec3c6c3543", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "1ec87f7b-43ea-4f5e-b378-3919f7767904", "address": "fa:16:3e:b7:2e:ea", "network": {"id": "bf1e4c1a-02c4-4b0c-a575-c0d58ce27323", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-361265617", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9297313e-7c50-4873-93d3-67284929163a", "external-id": "nsx-vlan-transportzone-620", "segmentation_id": 620, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ec87f7b-43", "ovs_interfaceid": "1ec87f7b-43ea-4f5e-b378-3919f7767904", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1852.945907] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f279ed-b4b5-4734-991c-acc3ee763652 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.953790] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-883ad457-8dd5-456d-a705-cdff3a96e96d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1852.967647] env[63024]: DEBUG nova.compute.provider_tree [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1853.021705] env[63024]: DEBUG nova.compute.manager [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1853.056148] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52416d1d-d1ff-fbd0-2f14-156d5fe2c7cf, 'name': SearchDatastore_Task, 'duration_secs': 0.00936} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.056982] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4dddc4d5-e8f6-432d-85ba-a91edba84134 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.062561] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1853.062561] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5259bfba-4f8d-d7bb-ca83-aa8a02031463" [ 1853.062561] env[63024]: _type = "Task" [ 1853.062561] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.070967] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5259bfba-4f8d-d7bb-ca83-aa8a02031463, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.093594] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1853.093594] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c4368a83-389e-4319-95c0-b4fc298ad315 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.100181] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1853.100181] env[63024]: value = "task-1951120" [ 1853.100181] env[63024]: _type = "Task" [ 1853.100181] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.107400] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951120, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.116459] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d34b5f-f9e5-f212-69fd-2ba8f5a74e91, 'name': SearchDatastore_Task, 'duration_secs': 0.009177} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.116742] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.116985] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1853.117194] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.196533] env[63024]: DEBUG nova.compute.manager [req-33298a63-419a-43f7-a452-d655b1ec1311 req-7cdcdee9-42d9-4a4a-8837-48f7fc9dca82 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Received event network-changed-f78f097c-0df1-4f4f-8941-cf21c2b2ca4b {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1853.196633] env[63024]: DEBUG nova.compute.manager [req-33298a63-419a-43f7-a452-d655b1ec1311 req-7cdcdee9-42d9-4a4a-8837-48f7fc9dca82 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Refreshing instance network info cache due to event network-changed-f78f097c-0df1-4f4f-8941-cf21c2b2ca4b. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1853.196824] env[63024]: DEBUG oslo_concurrency.lockutils [req-33298a63-419a-43f7-a452-d655b1ec1311 req-7cdcdee9-42d9-4a4a-8837-48f7fc9dca82 service nova] Acquiring lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1853.196970] env[63024]: DEBUG oslo_concurrency.lockutils [req-33298a63-419a-43f7-a452-d655b1ec1311 req-7cdcdee9-42d9-4a4a-8837-48f7fc9dca82 service nova] Acquired lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.197220] env[63024]: DEBUG nova.network.neutron [req-33298a63-419a-43f7-a452-d655b1ec1311 req-7cdcdee9-42d9-4a4a-8837-48f7fc9dca82 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Refreshing network info cache for port f78f097c-0df1-4f4f-8941-cf21c2b2ca4b {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1853.213709] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1853.216126] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5244c44-7ab4-4247-bda8-60a1d7b56649 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.223092] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1853.223092] env[63024]: value = "task-1951121" [ 1853.223092] env[63024]: _type = "Task" [ 1853.223092] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.231170] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951121, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.421296] env[63024]: DEBUG oslo_concurrency.lockutils [req-47dbb63d-d703-49bb-b2fc-304a4e9078b8 req-1f4e838e-5442-465c-a1d4-359452b63871 service nova] Releasing lock "refresh_cache-e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.470441] env[63024]: DEBUG nova.scheduler.client.report [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1853.543264] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1853.573391] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5259bfba-4f8d-d7bb-ca83-aa8a02031463, 'name': SearchDatastore_Task, 'duration_secs': 0.009636} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.573703] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1853.573958] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 52c17abc-78f0-417b-8675-e8d62bc8baa3/52c17abc-78f0-417b-8675-e8d62bc8baa3.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1853.574258] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1853.574451] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1853.574663] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f76f73bd-a9c9-4262-b369-3e712d1e98c6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.576641] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0674ba9d-0950-4158-8b70-7b51d23304aa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.583538] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1853.583538] env[63024]: value = "task-1951122" [ 1853.583538] env[63024]: _type = "Task" [ 1853.583538] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.587211] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1853.587385] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1853.588394] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0d1e954-8d94-411f-8b3e-da6c87d8fb5a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.593711] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951122, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.594890] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1853.594890] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520d09ea-ec12-2318-e696-9c33016dde5a" [ 1853.594890] env[63024]: _type = "Task" [ 1853.594890] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.603163] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520d09ea-ec12-2318-e696-9c33016dde5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.610446] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951120, 'name': PowerOffVM_Task, 'duration_secs': 0.341684} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.610688] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1853.610866] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance '1709d916-d0c4-4706-b41b-8b0ed25f3331' progress to 17 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1853.735418] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951121, 'name': PowerOffVM_Task, 'duration_secs': 0.230244} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1853.735701] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1853.736527] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed41171-5705-43cf-b2aa-cd54734acf2f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.760138] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b2e6b6-53c2-468f-bfd6-19bcf596fdfb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.806108] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1853.806422] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-204780c7-c95f-476c-9478-b01110c66247 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1853.813353] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1853.813353] env[63024]: value = "task-1951123" [ 1853.813353] env[63024]: _type = "Task" [ 1853.813353] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1853.821888] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951123, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1853.976155] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1853.976659] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1853.979415] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.140s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1853.981272] env[63024]: INFO nova.compute.claims [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1854.094049] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951122, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.095028] env[63024]: DEBUG nova.network.neutron [req-33298a63-419a-43f7-a452-d655b1ec1311 req-7cdcdee9-42d9-4a4a-8837-48f7fc9dca82 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Updated VIF entry in instance network info cache for port f78f097c-0df1-4f4f-8941-cf21c2b2ca4b. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1854.095362] env[63024]: DEBUG nova.network.neutron [req-33298a63-419a-43f7-a452-d655b1ec1311 req-7cdcdee9-42d9-4a4a-8837-48f7fc9dca82 service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Updating instance_info_cache with network_info: [{"id": "f78f097c-0df1-4f4f-8941-cf21c2b2ca4b", "address": "fa:16:3e:df:09:6b", "network": {"id": "83ed1c04-a2e0-4c15-ae35-68e988607ce4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-470202335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb1fcc9fd945cb9f4477fe1cce3f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf78f097c-0d", "ovs_interfaceid": "f78f097c-0df1-4f4f-8941-cf21c2b2ca4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1854.105861] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520d09ea-ec12-2318-e696-9c33016dde5a, 'name': SearchDatastore_Task, 'duration_secs': 0.016779} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.106729] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6f94ccb2-93e5-415b-ad7d-3f13c945d52e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.113348] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1854.113348] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526f853d-38a2-637c-fa9f-7d30feb04fe9" [ 1854.113348] env[63024]: _type = "Task" [ 1854.113348] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.119246] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1854.119587] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1854.119958] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1854.119958] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1854.120056] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1854.120182] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1854.120488] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb 
tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1854.120679] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1854.120854] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1854.121023] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1854.121270] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1854.126955] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c2bcdb88-87d2-4e44-82a5-7a617f1b0b3c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.145408] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526f853d-38a2-637c-fa9f-7d30feb04fe9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.147072] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1854.147072] env[63024]: value = "task-1951124" [ 1854.147072] env[63024]: _type = "Task" [ 1854.147072] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.157352] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951124, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.323814] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1854.324082] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1854.324254] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1854.485945] env[63024]: DEBUG nova.compute.utils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1854.488800] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1854.488923] env[63024]: DEBUG nova.network.neutron [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1854.596974] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951122, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.674845} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.599388] env[63024]: DEBUG oslo_concurrency.lockutils [req-33298a63-419a-43f7-a452-d655b1ec1311 req-7cdcdee9-42d9-4a4a-8837-48f7fc9dca82 service nova] Releasing lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.599388] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 52c17abc-78f0-417b-8675-e8d62bc8baa3/52c17abc-78f0-417b-8675-e8d62bc8baa3.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1854.599388] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1854.599388] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c919cb56-d073-400a-84cc-fbfaf2f6eea9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.606927] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1854.606927] env[63024]: value = "task-1951125" [ 1854.606927] env[63024]: _type = "Task" [ 1854.606927] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.622774] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951125, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.632123] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526f853d-38a2-637c-fa9f-7d30feb04fe9, 'name': SearchDatastore_Task, 'duration_secs': 0.060463} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.632930] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1854.633355] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e0a37f54-14ca-4eea-a9b3-6e652ca1e48d/e0a37f54-14ca-4eea-a9b3-6e652ca1e48d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1854.636251] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1854.636251] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1854.636251] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8093656-dc0c-430d-95cd-703fa3e3e422 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.637318] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98fc2864-091b-4186-90af-2b37ae64d795 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.646091] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1854.646091] env[63024]: value = "task-1951126" [ 1854.646091] env[63024]: _type = "Task" [ 1854.646091] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.649581] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1854.649841] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1854.653989] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca2cf3b3-dc7e-44b5-b72b-3fba69cccc96 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.659445] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951126, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.664509] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951124, 'name': ReconfigVM_Task, 'duration_secs': 0.22796} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.666517] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance '1709d916-d0c4-4706-b41b-8b0ed25f3331' progress to 33 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1854.669505] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1854.669505] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52775302-6c7f-f6d6-aba6-6622da77b39b" [ 1854.669505] env[63024]: _type = "Task" [ 1854.669505] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.680016] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52775302-6c7f-f6d6-aba6-6622da77b39b, 'name': SearchDatastore_Task, 'duration_secs': 0.009363} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1854.681092] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db882092-648b-4435-b255-264fddbb64db {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1854.686554] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1854.686554] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523d72de-65ef-5a03-6269-9721aaec166b" [ 1854.686554] env[63024]: _type = "Task" [ 1854.686554] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1854.696394] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523d72de-65ef-5a03-6269-9721aaec166b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1854.786651] env[63024]: DEBUG nova.policy [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d3f9605a2384a919157a571cd164859', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1166551532c473ca470379b16664513', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1854.954673] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.955012] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.955238] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1854.955504] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1854.955712] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1854.958657] env[63024]: INFO nova.compute.manager [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Terminating instance [ 1854.989910] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1855.117628] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951125, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072576} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.117995] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1855.119454] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21630ebb-81d1-4fd6-8922-66a4646493d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.156444] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 52c17abc-78f0-417b-8675-e8d62bc8baa3/52c17abc-78f0-417b-8675-e8d62bc8baa3.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1855.164752] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5074028c-d78c-442e-ac36-a4716543bffb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.190163] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1855.190539] env[63024]: DEBUG nova.virt.hardware 
[None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1855.190611] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1855.190744] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1855.190887] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1855.191041] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1855.191323] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1855.191614] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1855.191703] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1855.191869] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1855.191988] env[63024]: DEBUG nova.virt.hardware [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1855.197556] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 
1709d916-d0c4-4706-b41b-8b0ed25f3331] Reconfiguring VM instance instance-00000039 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1855.198803] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ac80622-c632-4053-bd0b-1a23f6fb9617 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.221845] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951126, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522495} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.224080] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e0a37f54-14ca-4eea-a9b3-6e652ca1e48d/e0a37f54-14ca-4eea-a9b3-6e652ca1e48d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1855.224186] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1855.224422] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1855.224422] env[63024]: value = "task-1951127" [ 1855.224422] env[63024]: _type = "Task" [ 1855.224422] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.224619] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-13fac6fb-897e-4e4e-8dba-e6a41159d14c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.233314] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523d72de-65ef-5a03-6269-9721aaec166b, 'name': SearchDatastore_Task, 'duration_secs': 0.01122} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.233602] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1855.233602] env[63024]: value = "task-1951128" [ 1855.233602] env[63024]: _type = "Task" [ 1855.233602] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.236542] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1855.236862] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 1448c924-7c61-4c43-a4e7-5a6dd45375cc/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk. {{(pid=63024) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1855.237941] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5d5deed-6a79-4fba-92a2-dd44c70fef7f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.247465] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1855.247465] env[63024]: value = "task-1951129" [ 1855.247465] env[63024]: _type = "Task" [ 1855.247465] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.247744] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951127, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.259777] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1855.259777] env[63024]: value = "task-1951130" [ 1855.259777] env[63024]: _type = "Task" [ 1855.259777] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.260051] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951128, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.266478] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951129, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.270804] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951130, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.283503] env[63024]: DEBUG nova.network.neutron [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Successfully created port: accf6d7d-a9ad-4eb7-bdee-7937675b2370 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1855.462727] env[63024]: DEBUG nova.compute.manager [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1855.462960] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1855.463879] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8696891-ef16-4e81-b9a2-71f9fe3a7f79 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.474879] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1855.475176] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48a69fbe-f833-4fa6-97ec-544a0748ae46 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.481233] env[63024]: DEBUG oslo_vmware.api [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1855.481233] env[63024]: value = "task-1951131" [ 1855.481233] env[63024]: _type = "Task" [ 1855.481233] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.493565] env[63024]: DEBUG oslo_vmware.api [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951131, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.609061] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea095ca-ba3c-46f9-bdf7-ee50819bf5f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.618291] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5b9c21-aae5-488f-8839-9ad5d2371048 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.651400] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5dbd78-fd35-46d7-8c9e-a706c83ba472 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.658798] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff438f10-a32a-4a26-9dfc-bf03307ecdae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.673250] env[63024]: DEBUG nova.compute.provider_tree [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1855.736837] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951127, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.747050] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951128, 'name': ReconfigVM_Task, 'duration_secs': 0.446481} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.747377] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Reconfigured VM instance instance-00000039 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1855.748192] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7564f4e-2e5b-415f-83ff-906fe4ebb87a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.773761] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] 1709d916-d0c4-4706-b41b-8b0ed25f3331/1709d916-d0c4-4706-b41b-8b0ed25f3331.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1855.779662] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e060d3b2-068b-4d77-aeb1-e00292ea80a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.792481] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951129, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.300872} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.792723] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1855.794163] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-841a5201-8fc9-46d4-a9e0-b33e8eea408f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.799528] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951130, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509399} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.801080] env[63024]: INFO nova.virt.vmwareapi.ds_util [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 1448c924-7c61-4c43-a4e7-5a6dd45375cc/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk. 
[ 1855.801467] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1855.801467] env[63024]: value = "task-1951132" [ 1855.801467] env[63024]: _type = "Task" [ 1855.801467] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.802345] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb8a94ac-b869-430b-95a4-77373dd4abd3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.828040] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] e0a37f54-14ca-4eea-a9b3-6e652ca1e48d/e0a37f54-14ca-4eea-a9b3-6e652ca1e48d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1855.829108] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-207cf405-56a1-41ec-b398-f7dd6b51d45c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.865836] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Reconfiguring VM instance instance-00000042 to attach disk [datastore1] 1448c924-7c61-4c43-a4e7-5a6dd45375cc/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1855.869692] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afb0f61a-4bbd-4822-a39e-e38b03c3df91 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1855.883250] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951132, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.884142] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1855.884142] env[63024]: value = "task-1951133" [ 1855.884142] env[63024]: _type = "Task" [ 1855.884142] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.888645] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1855.888645] env[63024]: value = "task-1951134" [ 1855.888645] env[63024]: _type = "Task" [ 1855.888645] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1855.901640] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951133, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.904983] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951134, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1855.991281] env[63024]: DEBUG oslo_vmware.api [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951131, 'name': PowerOffVM_Task, 'duration_secs': 0.39112} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1855.991616] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1855.993019] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1855.993019] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9754df2b-37df-4791-9d84-1308c4febba6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.007161] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1856.035808] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1856.036114] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1856.036277] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1856.036458] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1856.036601] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1856.036743] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1856.036940] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1856.037116] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1856.037755] env[63024]: DEBUG 
nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1856.038014] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1856.038217] env[63024]: DEBUG nova.virt.hardware [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1856.039412] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73c83368-546a-4b3a-af30-fd7609445c8f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.047063] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e57f29dd-765a-4b27-a9aa-f09bfbbb917c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.108169] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1856.108401] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1856.108651] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Deleting the datastore file [datastore1] b7f26f0e-d5a9-42a6-8af2-065659f89cf5 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1856.108948] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c2820e25-62aa-45a5-8323-aff2362e4897 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.115388] env[63024]: DEBUG oslo_vmware.api [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for the task: (returnval){ [ 1856.115388] env[63024]: value = "task-1951136" [ 1856.115388] env[63024]: _type = "Task" [ 1856.115388] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.123491] env[63024]: DEBUG oslo_vmware.api [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951136, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.194367] env[63024]: ERROR nova.scheduler.client.report [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [req-2f879c3b-96ca-4578-8bd1-aa7618da81e9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2f879c3b-96ca-4578-8bd1-aa7618da81e9"}]} [ 1856.210171] env[63024]: DEBUG nova.scheduler.client.report [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1856.224223] env[63024]: DEBUG nova.scheduler.client.report [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1856.224452] env[63024]: DEBUG nova.compute.provider_tree [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1856.236236] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: 
{'id': task-1951127, 'name': ReconfigVM_Task, 'duration_secs': 0.673097} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.237135] env[63024]: DEBUG nova.scheduler.client.report [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1856.239079] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 52c17abc-78f0-417b-8675-e8d62bc8baa3/52c17abc-78f0-417b-8675-e8d62bc8baa3.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1856.239910] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-432afe32-b1ba-448e-a755-550ce283150b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.246310] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1856.246310] env[63024]: value = "task-1951137" [ 1856.246310] env[63024]: _type = "Task" [ 1856.246310] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.254797] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951137, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.257845] env[63024]: DEBUG nova.scheduler.client.report [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1856.316230] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951132, 'name': ReconfigVM_Task, 'duration_secs': 0.272966} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.316526] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Reconfigured VM instance instance-00000039 to attach disk [datastore1] 1709d916-d0c4-4706-b41b-8b0ed25f3331/1709d916-d0c4-4706-b41b-8b0ed25f3331.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1856.316897] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance '1709d916-d0c4-4706-b41b-8b0ed25f3331' progress to 50 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1856.404145] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951133, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.408602] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951134, 'name': ReconfigVM_Task, 'duration_secs': 0.294599} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.412232] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Reconfigured VM instance instance-00000042 to attach disk [datastore1] 1448c924-7c61-4c43-a4e7-5a6dd45375cc/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1856.413732] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b6ef33b-8ede-450f-8857-c8d59cf59722 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.445117] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-15e1cc98-d80c-4046-95a2-285687911520 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.460893] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1856.460893] env[63024]: value = "task-1951138" [ 1856.460893] env[63024]: _type = "Task" [ 1856.460893] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.468855] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951138, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.627055] env[63024]: DEBUG oslo_vmware.api [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Task: {'id': task-1951136, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143548} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.627055] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1856.627239] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1856.627414] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1856.627581] env[63024]: INFO nova.compute.manager [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1856.627815] env[63024]: DEBUG oslo.service.loopingcall [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1856.628009] env[63024]: DEBUG nova.compute.manager [-] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1856.628163] env[63024]: DEBUG nova.network.neutron [-] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1856.758084] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951137, 'name': Rename_Task, 'duration_secs': 0.335912} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.760732] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1856.761410] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-344c1bf6-aca0-4b5a-b577-7bcea92a6450 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.768723] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1856.768723] env[63024]: value = "task-1951139" [ 1856.768723] env[63024]: _type = "Task" [ 1856.768723] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.779349] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951139, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.786018] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ae7d4e8-68b7-4e2d-86a5-61eb7fdb6e16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.791526] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97c5bdbc-d91c-496f-bb6f-19450e21bcbe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.822090] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88cfd1de-75e5-4022-9b42-2fa9f485b66d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.825848] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6048db-eeab-406c-bcb1-f0ce6b63ed03 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.848058] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13985f31-4c52-4847-9f39-991763856d1f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.853350] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d433cfa3-aaaa-4f3c-9c7b-d7747ccc712f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.872262] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance '1709d916-d0c4-4706-b41b-8b0ed25f3331' progress to 67 {{(pid=63024) 
_update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1856.884822] env[63024]: DEBUG nova.compute.provider_tree [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1856.894664] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951133, 'name': ReconfigVM_Task, 'duration_secs': 0.824008} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.895292] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Reconfigured VM instance instance-00000044 to attach disk [datastore1] e0a37f54-14ca-4eea-a9b3-6e652ca1e48d/e0a37f54-14ca-4eea-a9b3-6e652ca1e48d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1856.897128] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2f8bcbf-794c-4764-baf8-5a6c56fee657 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.902488] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1856.902488] env[63024]: value = "task-1951140" [ 1856.902488] env[63024]: _type = "Task" [ 1856.902488] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.911537] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951140, 'name': Rename_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1856.932454] env[63024]: DEBUG nova.compute.manager [req-1e3ec378-879a-4115-8ec8-c4f79f8eb5b3 req-ceff865e-eec5-40d7-9b32-61267f482c7c service nova] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Received event network-vif-plugged-accf6d7d-a9ad-4eb7-bdee-7937675b2370 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1856.932751] env[63024]: DEBUG oslo_concurrency.lockutils [req-1e3ec378-879a-4115-8ec8-c4f79f8eb5b3 req-ceff865e-eec5-40d7-9b32-61267f482c7c service nova] Acquiring lock "e1be531c-e849-42ac-8319-5bd453a7a562-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1856.932999] env[63024]: DEBUG oslo_concurrency.lockutils [req-1e3ec378-879a-4115-8ec8-c4f79f8eb5b3 req-ceff865e-eec5-40d7-9b32-61267f482c7c service nova] Lock "e1be531c-e849-42ac-8319-5bd453a7a562-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1856.933148] env[63024]: DEBUG oslo_concurrency.lockutils [req-1e3ec378-879a-4115-8ec8-c4f79f8eb5b3 req-ceff865e-eec5-40d7-9b32-61267f482c7c service nova] Lock "e1be531c-e849-42ac-8319-5bd453a7a562-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1856.934249] env[63024]: DEBUG nova.compute.manager [req-1e3ec378-879a-4115-8ec8-c4f79f8eb5b3 req-ceff865e-eec5-40d7-9b32-61267f482c7c service nova] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] No waiting events found dispatching network-vif-plugged-accf6d7d-a9ad-4eb7-bdee-7937675b2370 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1856.934249] env[63024]: WARNING nova.compute.manager [req-1e3ec378-879a-4115-8ec8-c4f79f8eb5b3 req-ceff865e-eec5-40d7-9b32-61267f482c7c service nova] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Received unexpected event network-vif-plugged-accf6d7d-a9ad-4eb7-bdee-7937675b2370 for instance with vm_state building and task_state spawning. [ 1856.971285] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951138, 'name': ReconfigVM_Task, 'duration_secs': 0.146901} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1856.971539] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1856.971795] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b96db8d5-ad1e-430e-b566-89abd7959242 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1856.980152] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1856.980152] env[63024]: value = "task-1951141" [ 1856.980152] env[63024]: _type = "Task" [ 1856.980152] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1856.986365] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951141, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.016023] env[63024]: DEBUG nova.network.neutron [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Successfully updated port: accf6d7d-a9ad-4eb7-bdee-7937675b2370 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1857.161822] env[63024]: DEBUG nova.compute.manager [req-cd93a0b3-4e27-4043-b429-f26a8b6f8500 req-bd1a113e-aadf-4f38-9d5b-6a5910fd5135 service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Received event network-vif-deleted-c1430120-4c82-424a-8155-f1e22eb4a7ae {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1857.161822] env[63024]: INFO nova.compute.manager [req-cd93a0b3-4e27-4043-b429-f26a8b6f8500 req-bd1a113e-aadf-4f38-9d5b-6a5910fd5135 service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Neutron deleted interface c1430120-4c82-424a-8155-f1e22eb4a7ae; detaching it from the instance and deleting it from the info cache [ 1857.161941] env[63024]: DEBUG nova.network.neutron [req-cd93a0b3-4e27-4043-b429-f26a8b6f8500 req-bd1a113e-aadf-4f38-9d5b-6a5910fd5135 service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.279303] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951139, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.413333] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951140, 'name': Rename_Task, 'duration_secs': 0.370091} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.413687] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1857.413820] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f07def3d-4be5-40ee-9a34-48cf3a44d2a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.419993] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1857.419993] env[63024]: value = "task-1951142" [ 1857.419993] env[63024]: _type = "Task" [ 1857.419993] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1857.427951] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951142, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.428800] env[63024]: DEBUG nova.scheduler.client.report [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 107 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1857.429037] env[63024]: DEBUG nova.compute.provider_tree [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 107 to 108 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1857.429255] env[63024]: DEBUG nova.compute.provider_tree [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1857.432885] env[63024]: DEBUG nova.network.neutron [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Port 611e1e79-ffb8-4ba9-8718-b57360eaa492 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1857.487773] env[63024]: DEBUG oslo_vmware.api [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951141, 'name': PowerOnVM_Task, 'duration_secs': 0.409843} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1857.488032] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1857.490684] env[63024]: DEBUG nova.compute.manager [None req-4edf4386-8895-4c54-9365-4be4b7b3eecc tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1857.491539] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ead0e1-e435-45ea-af5a-9933e6e93af5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.521824] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "refresh_cache-e1be531c-e849-42ac-8319-5bd453a7a562" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1857.521978] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired lock "refresh_cache-e1be531c-e849-42ac-8319-5bd453a7a562" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1857.522147] env[63024]: DEBUG nova.network.neutron [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1857.638093] env[63024]: DEBUG nova.network.neutron [-] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1857.665011] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-42c62b9b-0701-43e6-877e-cbb9c7d71124 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.676017] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2394914-f1bc-4c59-bca1-e24229c153a5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1857.711434] env[63024]: DEBUG nova.compute.manager [req-cd93a0b3-4e27-4043-b429-f26a8b6f8500 req-bd1a113e-aadf-4f38-9d5b-6a5910fd5135 service nova] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Detach interface failed, port_id=c1430120-4c82-424a-8155-f1e22eb4a7ae, reason: Instance b7f26f0e-d5a9-42a6-8af2-065659f89cf5 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1857.780723] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951139, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.932469] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951142, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1857.938982] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.960s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1857.939576] env[63024]: DEBUG nova.compute.manager [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1857.942040] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.888s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1857.943541] env[63024]: INFO nova.compute.claims [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1858.053239] env[63024]: DEBUG nova.network.neutron [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1858.143062] env[63024]: INFO nova.compute.manager [-] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Took 1.51 seconds to deallocate network for instance. 
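The "compute_resources" acquired/released entries above (held 3.960s by ResourceTracker.instance_claim, then acquired again after waiting 27.888s) come from oslo.concurrency's named-lock wrapper, whose "inner" helper in lockutils.py writes exactly these DEBUG lines. The snippet below is a minimal illustrative sketch of that serialization pattern, assuming oslo.concurrency is installed; it is not taken from this log, and the function body and instance UUID are placeholders.

import time
from oslo_concurrency import lockutils

# Callers serialize on the named lock "compute_resources"; a second caller
# blocks until the first releases, which is what produces the
# "waited N.NNNs" / "held N.NNNs" figures in the DEBUG output above.
@lockutils.synchronized('compute_resources')
def instance_claim(instance_uuid):
    time.sleep(0.1)  # placeholder for claim bookkeeping done under the lock
    return instance_uuid

if __name__ == '__main__':
    print(instance_claim('01b8072a-4483-4932-8294-7e5b48e6b203'))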
[ 1858.197057] env[63024]: DEBUG nova.network.neutron [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Updating instance_info_cache with network_info: [{"id": "accf6d7d-a9ad-4eb7-bdee-7937675b2370", "address": "fa:16:3e:34:06:68", "network": {"id": "9e97434d-c36c-478d-a559-df5b5d8bcd77", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-630350621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1166551532c473ca470379b16664513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaccf6d7d-a9", "ovs_interfaceid": "accf6d7d-a9ad-4eb7-bdee-7937675b2370", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1858.281056] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951139, 'name': PowerOnVM_Task, 'duration_secs': 1.013556} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.281586] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1858.281586] env[63024]: INFO nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Took 9.72 seconds to spawn the instance on the hypervisor. [ 1858.281736] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1858.282566] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294479ce-e619-4ec4-8b6d-ffe007db75ea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.430881] env[63024]: DEBUG oslo_vmware.api [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951142, 'name': PowerOnVM_Task, 'duration_secs': 0.885569} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1858.431233] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1858.431496] env[63024]: INFO nova.compute.manager [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Took 12.64 seconds to spawn the instance on the hypervisor. [ 1858.431683] env[63024]: DEBUG nova.compute.manager [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1858.432475] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a140ecb-1224-4114-8419-c8c8f0ae1fdc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.451222] env[63024]: DEBUG nova.compute.utils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1858.456418] env[63024]: DEBUG nova.compute.manager [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1858.456594] env[63024]: DEBUG nova.network.neutron [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1858.462883] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "1709d916-d0c4-4706-b41b-8b0ed25f3331-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.463122] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1858.463299] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1858.510251] env[63024]: DEBUG nova.policy [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36ec7a083bcc41c2a0b6aedfe1aba470', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e138433d59374418952a186a4d2a0f78', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1858.647443] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1858.700173] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Releasing lock "refresh_cache-e1be531c-e849-42ac-8319-5bd453a7a562" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1858.700440] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 
tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Instance network_info: |[{"id": "accf6d7d-a9ad-4eb7-bdee-7937675b2370", "address": "fa:16:3e:34:06:68", "network": {"id": "9e97434d-c36c-478d-a559-df5b5d8bcd77", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-630350621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1166551532c473ca470379b16664513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaccf6d7d-a9", "ovs_interfaceid": "accf6d7d-a9ad-4eb7-bdee-7937675b2370", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1858.700847] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:06:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4055505f-97ab-400b-969c-43d99b38fd48', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'accf6d7d-a9ad-4eb7-bdee-7937675b2370', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1858.708909] env[63024]: DEBUG oslo.service.loopingcall [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1858.711417] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1858.711801] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ef96a6d9-6c06-435c-9a44-440037a9c2e7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.736644] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1858.736644] env[63024]: value = "task-1951143" [ 1858.736644] env[63024]: _type = "Task" [ 1858.736644] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1858.749215] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951143, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1858.793113] env[63024]: DEBUG nova.network.neutron [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Successfully created port: 7cedcfa3-1f00-4ebd-88b4-207d64c14235 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1858.803495] env[63024]: INFO nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Took 40.99 seconds to build instance. [ 1858.949136] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deb95843-60f1-435c-99bd-42687efd4157 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.955324] env[63024]: INFO nova.compute.manager [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Took 42.89 seconds to build instance. [ 1858.956780] env[63024]: DEBUG nova.compute.manager [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1858.965175] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3965ea00-a66c-4e61-b490-7677e6a36cef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1858.971136] env[63024]: DEBUG nova.compute.manager [req-310bf13a-273a-48f2-88cb-e0b526bb9474 req-39f98786-ab0d-4df1-bb67-4a74c125914e service nova] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Received event network-changed-accf6d7d-a9ad-4eb7-bdee-7937675b2370 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1858.971136] env[63024]: DEBUG nova.compute.manager [req-310bf13a-273a-48f2-88cb-e0b526bb9474 req-39f98786-ab0d-4df1-bb67-4a74c125914e service nova] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Refreshing instance network info cache due to event network-changed-accf6d7d-a9ad-4eb7-bdee-7937675b2370. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1858.971136] env[63024]: DEBUG oslo_concurrency.lockutils [req-310bf13a-273a-48f2-88cb-e0b526bb9474 req-39f98786-ab0d-4df1-bb67-4a74c125914e service nova] Acquiring lock "refresh_cache-e1be531c-e849-42ac-8319-5bd453a7a562" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1858.971136] env[63024]: DEBUG oslo_concurrency.lockutils [req-310bf13a-273a-48f2-88cb-e0b526bb9474 req-39f98786-ab0d-4df1-bb67-4a74c125914e service nova] Acquired lock "refresh_cache-e1be531c-e849-42ac-8319-5bd453a7a562" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1858.971600] env[63024]: DEBUG nova.network.neutron [req-310bf13a-273a-48f2-88cb-e0b526bb9474 req-39f98786-ab0d-4df1-bb67-4a74c125914e service nova] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Refreshing network info cache for port accf6d7d-a9ad-4eb7-bdee-7937675b2370 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1859.015724] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23c65e1f-d464-456c-ab14-2664278e40b5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.024632] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ded43986-30f5-4ebc-b321-623c193b6f3b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.040293] env[63024]: DEBUG nova.compute.provider_tree [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1859.249387] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951143, 'name': CreateVM_Task, 'duration_secs': 0.384627} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.250073] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1859.251209] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.251362] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.251649] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1859.251922] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be9d8bab-5984-4f6f-b9a0-8a339636b829 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.257224] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1859.257224] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520373ee-4b6f-2f1c-4bfa-5090d67eabc9" [ 1859.257224] env[63024]: _type = "Task" [ 1859.257224] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.265843] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520373ee-4b6f-2f1c-4bfa-5090d67eabc9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.305572] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "52c17abc-78f0-417b-8675-e8d62bc8baa3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.232s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.460873] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9992c4e1-b0f8-41a1-92d1-548508a279e4 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.602s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.543115] env[63024]: DEBUG nova.scheduler.client.report [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1859.563587] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.563796] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.563981] env[63024]: DEBUG nova.network.neutron [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1859.687565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.687565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 
tempest-ServersTestMultiNic-1858159583-project-member] Lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.687565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1859.687565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1859.687565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1859.689332] env[63024]: INFO nova.compute.manager [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Terminating instance [ 1859.768670] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520373ee-4b6f-2f1c-4bfa-5090d67eabc9, 'name': SearchDatastore_Task, 'duration_secs': 0.011035} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1859.771458] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1859.771718] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1859.771958] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1859.772125] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1859.772307] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1859.772813] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1ddeaa48-f459-4c79-9685-a5b821d2c9cf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.783059] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1859.783255] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1859.783971] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ff645a1-efe5-4ab4-8730-9602502821b1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1859.790090] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1859.790090] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5282cb6f-d827-05fa-6c16-183f6a52fcb4" [ 1859.790090] env[63024]: _type = "Task" [ 1859.790090] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1859.799504] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5282cb6f-d827-05fa-6c16-183f6a52fcb4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1859.808826] env[63024]: DEBUG nova.compute.manager [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1859.933676] env[63024]: DEBUG nova.network.neutron [req-310bf13a-273a-48f2-88cb-e0b526bb9474 req-39f98786-ab0d-4df1-bb67-4a74c125914e service nova] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Updated VIF entry in instance network info cache for port accf6d7d-a9ad-4eb7-bdee-7937675b2370. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1859.934092] env[63024]: DEBUG nova.network.neutron [req-310bf13a-273a-48f2-88cb-e0b526bb9474 req-39f98786-ab0d-4df1-bb67-4a74c125914e service nova] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Updating instance_info_cache with network_info: [{"id": "accf6d7d-a9ad-4eb7-bdee-7937675b2370", "address": "fa:16:3e:34:06:68", "network": {"id": "9e97434d-c36c-478d-a559-df5b5d8bcd77", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-630350621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1166551532c473ca470379b16664513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapaccf6d7d-a9", "ovs_interfaceid": "accf6d7d-a9ad-4eb7-bdee-7937675b2370", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1859.969435] env[63024]: DEBUG nova.compute.manager [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1860.002514] env[63024]: DEBUG nova.virt.hardware [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1860.002757] env[63024]: DEBUG nova.virt.hardware [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1860.002910] env[63024]: DEBUG nova.virt.hardware [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1860.003097] env[63024]: DEBUG nova.virt.hardware [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1860.003243] env[63024]: DEBUG nova.virt.hardware [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1860.003386] env[63024]: DEBUG nova.virt.hardware [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1860.003604] env[63024]: DEBUG nova.virt.hardware [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1860.003767] env[63024]: DEBUG nova.virt.hardware [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1860.003935] env[63024]: DEBUG nova.virt.hardware [None 
req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1860.004106] env[63024]: DEBUG nova.virt.hardware [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1860.004316] env[63024]: DEBUG nova.virt.hardware [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1860.005178] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1912c274-01a7-4807-ad20-d4c652bb82c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.014593] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44629558-6907-45d0-aea3-8630c7aceac9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.048250] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.106s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.048760] env[63024]: DEBUG nova.compute.manager [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1860.051470] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.443s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.193825] env[63024]: DEBUG nova.compute.manager [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1860.194225] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1860.195201] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3e7faca-3ed7-460a-9f3c-09e8caaaefb8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.207434] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1860.210747] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4a9f66fa-a45e-49e5-86e0-0724af2ac8f3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.218639] env[63024]: DEBUG oslo_vmware.api [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1860.218639] env[63024]: value = "task-1951144" [ 1860.218639] env[63024]: _type = "Task" [ 1860.218639] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.227272] env[63024]: DEBUG oslo_vmware.api [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951144, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.303121] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5282cb6f-d827-05fa-6c16-183f6a52fcb4, 'name': SearchDatastore_Task, 'duration_secs': 0.00916} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.306186] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a89904ef-94a6-4cc4-97f4-192a9f89f8a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.316036] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1860.316036] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ec6619-22a3-a920-14f9-0927169bd814" [ 1860.316036] env[63024]: _type = "Task" [ 1860.316036] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.331325] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ec6619-22a3-a920-14f9-0927169bd814, 'name': SearchDatastore_Task, 'duration_secs': 0.011} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.331674] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.331943] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e1be531c-e849-42ac-8319-5bd453a7a562/e1be531c-e849-42ac-8319-5bd453a7a562.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1860.332156] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0fd01943-1af0-40de-9fe4-5f14c95c4ad1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.335986] env[63024]: DEBUG nova.compute.manager [req-0d9d6a1f-2c6a-4930-8c54-f634e483f88e req-831d4249-58ea-4522-bdf4-bf81e8c64dea service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Received event network-vif-plugged-7cedcfa3-1f00-4ebd-88b4-207d64c14235 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1860.336226] env[63024]: DEBUG oslo_concurrency.lockutils [req-0d9d6a1f-2c6a-4930-8c54-f634e483f88e req-831d4249-58ea-4522-bdf4-bf81e8c64dea service nova] Acquiring lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.337223] env[63024]: DEBUG oslo_concurrency.lockutils [req-0d9d6a1f-2c6a-4930-8c54-f634e483f88e req-831d4249-58ea-4522-bdf4-bf81e8c64dea service nova] Lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1860.338910] env[63024]: DEBUG oslo_concurrency.lockutils [req-0d9d6a1f-2c6a-4930-8c54-f634e483f88e req-831d4249-58ea-4522-bdf4-bf81e8c64dea service nova] Lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1860.339273] env[63024]: DEBUG nova.compute.manager [req-0d9d6a1f-2c6a-4930-8c54-f634e483f88e req-831d4249-58ea-4522-bdf4-bf81e8c64dea service nova] 
[instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] No waiting events found dispatching network-vif-plugged-7cedcfa3-1f00-4ebd-88b4-207d64c14235 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1860.339543] env[63024]: WARNING nova.compute.manager [req-0d9d6a1f-2c6a-4930-8c54-f634e483f88e req-831d4249-58ea-4522-bdf4-bf81e8c64dea service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Received unexpected event network-vif-plugged-7cedcfa3-1f00-4ebd-88b4-207d64c14235 for instance with vm_state building and task_state spawning. [ 1860.341218] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1860.346891] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1860.346891] env[63024]: value = "task-1951145" [ 1860.346891] env[63024]: _type = "Task" [ 1860.346891] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.357917] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951145, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.375716] env[63024]: DEBUG nova.network.neutron [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Successfully updated port: 7cedcfa3-1f00-4ebd-88b4-207d64c14235 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1860.437247] env[63024]: DEBUG oslo_concurrency.lockutils [req-310bf13a-273a-48f2-88cb-e0b526bb9474 req-39f98786-ab0d-4df1-bb67-4a74c125914e service nova] Releasing lock "refresh_cache-e1be531c-e849-42ac-8319-5bd453a7a562" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1860.554727] env[63024]: DEBUG nova.compute.utils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1860.571272] env[63024]: DEBUG nova.compute.manager [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1860.577162] env[63024]: DEBUG nova.compute.manager [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1860.577162] env[63024]: DEBUG nova.network.neutron [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1860.581044] env[63024]: DEBUG nova.network.neutron [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance_info_cache with network_info: [{"id": "611e1e79-ffb8-4ba9-8718-b57360eaa492", "address": "fa:16:3e:f8:21:2c", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap611e1e79-ff", "ovs_interfaceid": "611e1e79-ffb8-4ba9-8718-b57360eaa492", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1860.635023] env[63024]: DEBUG nova.policy [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '73669d10734b403e843e73b3c63bef00', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12d782556c614caf84a51b37fa43b5de', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1860.737469] env[63024]: DEBUG oslo_vmware.api [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951144, 'name': PowerOffVM_Task, 'duration_secs': 0.277825} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.737972] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1860.738341] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1860.738778] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-20cbfe63-a261-487b-b278-59fc40b12176 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.859259] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951145, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.50388} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1860.859259] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e1be531c-e849-42ac-8319-5bd453a7a562/e1be531c-e849-42ac-8319-5bd453a7a562.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1860.859545] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1860.859622] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fc7798bb-e0b3-4a39-b63f-94c354a765cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1860.870888] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1860.870888] env[63024]: value = "task-1951147" [ 1860.870888] env[63024]: _type = "Task" [ 1860.870888] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1860.879988] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951147, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1860.885813] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "refresh_cache-77c27741-ee3a-4a8b-bbd3-89759288f7c6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1860.885813] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquired lock "refresh_cache-77c27741-ee3a-4a8b-bbd3-89759288f7c6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1860.885813] env[63024]: DEBUG nova.network.neutron [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1860.955999] env[63024]: DEBUG nova.network.neutron [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Successfully created port: 7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1861.014443] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1861.014762] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1861.014966] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Deleting the datastore file [datastore1] e0a37f54-14ca-4eea-a9b3-6e652ca1e48d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1861.015258] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-acf806ef-5726-4e3c-9df4-34e6a2bd8d7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.023200] env[63024]: DEBUG oslo_vmware.api [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for the task: (returnval){ [ 1861.023200] env[63024]: value = "task-1951148" [ 1861.023200] env[63024]: _type = "Task" [ 1861.023200] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.037508] env[63024]: DEBUG oslo_vmware.api [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.080690] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Applying migration context for instance 1709d916-d0c4-4706-b41b-8b0ed25f3331 as it has an incoming, in-progress migration 787068cf-2789-4013-8b27-8a10a4f14022. Migration status is post-migrating {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1861.084139] env[63024]: INFO nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating resource usage from migration 787068cf-2789-4013-8b27-8a10a4f14022 [ 1861.091093] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1861.105838] env[63024]: DEBUG nova.compute.manager [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Received event network-changed-9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1861.106054] env[63024]: DEBUG nova.compute.manager [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Refreshing instance network info cache due to event network-changed-9241a9a2-d60f-4c1f-a092-5caa7b738112. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1861.110387] env[63024]: DEBUG oslo_concurrency.lockutils [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] Acquiring lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1861.110387] env[63024]: DEBUG oslo_concurrency.lockutils [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] Acquired lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1861.110387] env[63024]: DEBUG nova.network.neutron [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Refreshing network info cache for port 9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1861.120533] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.121309] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.121501] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b7f26f0e-d5a9-42a6-8af2-065659f89cf5 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1861.121664] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e8ad74ce-7862-4574-98e7-14bc54bd5d6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.121806] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance df2933d1-32c3-48a6-8ceb-d5e3047d0b78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.121949] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 37792b57-3347-4134-a060-53359afa3298 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.122108] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance c1fd4146-6dd3-49e9-a744-466e6168e158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.122262] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 82b7019c-5049-4b8b-abb4-46f326ce3d5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.122637] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b588ea21-dea0-4ee6-8f9e-12007d0a1ce1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.122637] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 31a693b6-293a-4f01-9baf-a9e7e8d453d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.122784] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 601a003d-811c-4698-b0b6-054482d32c21 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.123093] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 650a97b9-911e-44b0-9e82-a6d4cc95c9dd is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. Skipping heal of allocation because we do not know what to do. [ 1861.123235] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 839776ef-0562-424d-b301-2aa896f32e14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.123460] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9267e5e4-732d-47f1-8a30-d926a1269fb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.123612] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.123612] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance ac60546a-37b2-4d2a-8505-61fe202e2ed0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.124029] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 7cf0ac90-d87d-4644-8a88-da5328d1721d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.124291] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 92d1f96e-bbe7-4654-9d3a-47ba40057157 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.124517] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance c12774e4-77d1-4001-8d5d-0240dfed4ead is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1861.125282] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9bf1316e-f1ae-426e-a0a2-d814a2460c4d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1861.125282] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance fe6847e2-a742-4338-983f-698c13aaefde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.125282] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 1448c924-7c61-4c43-a4e7-5a6dd45375cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.125282] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 73db94b8-cfa8-4457-bccb-d4b780edbd93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.125447] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e0a37f54-14ca-4eea-a9b3-6e652ca1e48d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.125535] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 52c17abc-78f0-417b-8675-e8d62bc8baa3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.125677] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e1be531c-e849-42ac-8319-5bd453a7a562 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.125956] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 77c27741-ee3a-4a8b-bbd3-89759288f7c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.125956] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 01b8072a-4483-4932-8294-7e5b48e6b203 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1861.391926] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951147, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.245768} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.392791] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1861.394956] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf40ceae-5d35-4efc-bdfb-8ac7bb2d262e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.432940] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] e1be531c-e849-42ac-8319-5bd453a7a562/e1be531c-e849-42ac-8319-5bd453a7a562.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1861.433481] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-781d664c-1035-4b87-8a61-62ebe9835e46 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.455213] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1861.455213] env[63024]: value = "task-1951149" [ 1861.455213] env[63024]: _type = "Task" [ 1861.455213] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.460025] env[63024]: DEBUG nova.network.neutron [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1861.468026] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951149, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1861.534784] env[63024]: DEBUG oslo_vmware.api [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Task: {'id': task-1951148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152639} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.534988] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1861.535186] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1861.535401] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1861.535586] env[63024]: INFO nova.compute.manager [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1861.535887] env[63024]: DEBUG oslo.service.loopingcall [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1861.536150] env[63024]: DEBUG nova.compute.manager [-] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1861.536286] env[63024]: DEBUG nova.network.neutron [-] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1861.597052] env[63024]: DEBUG nova.compute.manager [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1861.621080] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24442b43-5e36-4056-a655-2d8007d1f872 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.629369] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9a7f4452-ae50-4779-8474-11d3a6d3533f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1861.646170] env[63024]: DEBUG nova.virt.hardware [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1861.646431] env[63024]: DEBUG nova.virt.hardware [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1861.646585] env[63024]: DEBUG nova.virt.hardware [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1861.646762] env[63024]: DEBUG nova.virt.hardware [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1861.646905] env[63024]: DEBUG nova.virt.hardware [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1861.647059] env[63024]: DEBUG nova.virt.hardware [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1861.647285] env[63024]: DEBUG nova.virt.hardware [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1861.647449] env[63024]: DEBUG nova.virt.hardware [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1861.647648] env[63024]: DEBUG nova.virt.hardware [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1861.647800] env[63024]: DEBUG nova.virt.hardware [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1861.647984] env[63024]: DEBUG nova.virt.hardware [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1861.649067] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952d0b8b-47d6-4917-8e00-c7f3f9d556d2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.653906] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8bb24a2-8b89-412b-a9c0-5d7f54ee84c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.665083] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e4b7ca9-b2a0-4d81-aa1d-7157240f6553 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.669145] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance '1709d916-d0c4-4706-b41b-8b0ed25f3331' progress to 83 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1861.967356] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951149, 'name': ReconfigVM_Task, 'duration_secs': 0.472618} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1861.967673] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Reconfigured VM instance instance-00000046 to attach disk [datastore1] e1be531c-e849-42ac-8319-5bd453a7a562/e1be531c-e849-42ac-8319-5bd453a7a562.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1861.968628] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8bc3fb87-9d38-45a7-8d3a-f6a0b4f597d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1861.976984] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1861.976984] env[63024]: value = "task-1951150" [ 1861.976984] env[63024]: _type = "Task" [ 1861.976984] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1861.986616] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951150, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.154524] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 0f371c69-c7ae-4649-b038-be82e8ca74e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1862.154524] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Migration 787068cf-2789-4013-8b27-8a10a4f14022 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1862.176065] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1862.176644] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6283b7cf-ec2e-486b-a96c-9c1ca9ae0baf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.186021] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1862.186021] env[63024]: value = "task-1951151" [ 1862.186021] env[63024]: _type = "Task" [ 1862.186021] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.196161] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951151, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.275825] env[63024]: DEBUG nova.network.neutron [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Updating instance_info_cache with network_info: [{"id": "7cedcfa3-1f00-4ebd-88b4-207d64c14235", "address": "fa:16:3e:9c:8b:1f", "network": {"id": "384d05e3-ef53-40f3-8a75-21f850df070c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-411167579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e138433d59374418952a186a4d2a0f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cedcfa3-1f", "ovs_interfaceid": "7cedcfa3-1f00-4ebd-88b4-207d64c14235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.491255] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951150, 'name': Rename_Task, 'duration_secs': 0.180263} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1862.491564] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1862.491891] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ddab8050-22c7-4858-a0b9-ced07b62c0b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.502793] env[63024]: DEBUG nova.network.neutron [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Successfully updated port: 7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1862.509805] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1862.509805] env[63024]: value = "task-1951152" [ 1862.509805] env[63024]: _type = "Task" [ 1862.509805] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.511181] env[63024]: DEBUG nova.network.neutron [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updated VIF entry in instance network info cache for port 9241a9a2-d60f-4c1f-a092-5caa7b738112. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1862.515037] env[63024]: DEBUG nova.network.neutron [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updating instance_info_cache with network_info: [{"id": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "address": "fa:16:3e:2a:d5:7a", "network": {"id": "ce29ceef-bd62-4366-81a9-4c99d66ac178", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-898882035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4c262cc280074a0bb3b8967f2cbb7c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9241a9a2-d6", "ovs_interfaceid": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1862.526277] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951152, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.658058] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1862.658440] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 1709d916-d0c4-4706-b41b-8b0ed25f3331 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1862.698751] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951151, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1862.779357] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Releasing lock "refresh_cache-77c27741-ee3a-4a8b-bbd3-89759288f7c6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1862.779757] env[63024]: DEBUG nova.compute.manager [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Instance network_info: |[{"id": "7cedcfa3-1f00-4ebd-88b4-207d64c14235", "address": "fa:16:3e:9c:8b:1f", "network": {"id": "384d05e3-ef53-40f3-8a75-21f850df070c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-411167579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e138433d59374418952a186a4d2a0f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cedcfa3-1f", "ovs_interfaceid": "7cedcfa3-1f00-4ebd-88b4-207d64c14235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1862.780594] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:8b:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7cedcfa3-1f00-4ebd-88b4-207d64c14235', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1862.789245] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Creating folder: Project (e138433d59374418952a186a4d2a0f78). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1862.789620] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bcaf0791-59b6-4489-b77b-3e020a9b1843 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.807311] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Created folder: Project (e138433d59374418952a186a4d2a0f78) in parent group-v401959. [ 1862.807631] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Creating folder: Instances. Parent ref: group-v402153. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1862.808475] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aa7007d9-5cfb-457b-bc85-db669b510dee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.821637] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Created folder: Instances in parent group-v402153. [ 1862.821869] env[63024]: DEBUG oslo.service.loopingcall [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1862.822058] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1862.822329] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d16e8bb-1bd1-44ae-b92d-e0f1d9a25716 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1862.853166] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1862.853166] env[63024]: value = "task-1951155" [ 1862.853166] env[63024]: _type = "Task" [ 1862.853166] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1862.864222] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951155, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.011554] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.011749] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.011869] env[63024]: DEBUG nova.network.neutron [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1863.021491] env[63024]: DEBUG oslo_concurrency.lockutils [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] Releasing lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.021861] env[63024]: DEBUG nova.compute.manager [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Received event network-changed-9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1863.022095] env[63024]: DEBUG nova.compute.manager [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Refreshing instance network info cache due to event network-changed-9241a9a2-d60f-4c1f-a092-5caa7b738112. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1863.022342] env[63024]: DEBUG oslo_concurrency.lockutils [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] Acquiring lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.022456] env[63024]: DEBUG oslo_concurrency.lockutils [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] Acquired lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.022826] env[63024]: DEBUG nova.network.neutron [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Refreshing network info cache for port 9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1863.033506] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951152, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.165045] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9edbda30-2e28-4961-a6ad-5ab34c40ed44 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1863.201059] env[63024]: DEBUG oslo_vmware.api [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951151, 'name': PowerOnVM_Task, 'duration_secs': 0.52585} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.201407] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1863.201596] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0270d635-53cb-4676-9f81-c065279417cb tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance '1709d916-d0c4-4706-b41b-8b0ed25f3331' progress to 100 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1863.337564] env[63024]: DEBUG nova.compute.manager [req-b821d630-2d74-4fc5-b828-f5b971c40910 req-0532c74a-0c55-4db9-8a55-3e41b3be9b68 service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Received event network-changed-7cedcfa3-1f00-4ebd-88b4-207d64c14235 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1863.337564] env[63024]: DEBUG nova.compute.manager [req-b821d630-2d74-4fc5-b828-f5b971c40910 req-0532c74a-0c55-4db9-8a55-3e41b3be9b68 service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Refreshing instance network info cache due to event network-changed-7cedcfa3-1f00-4ebd-88b4-207d64c14235. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1863.337564] env[63024]: DEBUG oslo_concurrency.lockutils [req-b821d630-2d74-4fc5-b828-f5b971c40910 req-0532c74a-0c55-4db9-8a55-3e41b3be9b68 service nova] Acquiring lock "refresh_cache-77c27741-ee3a-4a8b-bbd3-89759288f7c6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.337564] env[63024]: DEBUG oslo_concurrency.lockutils [req-b821d630-2d74-4fc5-b828-f5b971c40910 req-0532c74a-0c55-4db9-8a55-3e41b3be9b68 service nova] Acquired lock "refresh_cache-77c27741-ee3a-4a8b-bbd3-89759288f7c6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.337564] env[63024]: DEBUG nova.network.neutron [req-b821d630-2d74-4fc5-b828-f5b971c40910 req-0532c74a-0c55-4db9-8a55-3e41b3be9b68 service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Refreshing network info cache for port 7cedcfa3-1f00-4ebd-88b4-207d64c14235 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1863.361036] env[63024]: DEBUG nova.compute.manager [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Received event network-vif-deleted-1ec87f7b-43ea-4f5e-b378-3919f7767904 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1863.361338] env[63024]: INFO nova.compute.manager [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Neutron deleted interface 1ec87f7b-43ea-4f5e-b378-3919f7767904; detaching it from the instance and deleting it from the info cache [ 1863.361580] env[63024]: DEBUG nova.network.neutron [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Updating 
instance_info_cache with network_info: [{"id": "dcca960a-05bc-4b0e-b542-36ec3c6c3543", "address": "fa:16:3e:b6:57:00", "network": {"id": "79226672-68e0-4c51-b157-fd2bec47c28d", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-771192213", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.214", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "674f344eaf784662ac922405620a3ac4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f880ac2e-d532-4f54-87bb-998a8d1bca78", "external-id": "nsx-vlan-transportzone-491", "segmentation_id": 491, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdcca960a-05", "ovs_interfaceid": "dcca960a-05bc-4b0e-b542-36ec3c6c3543", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.369299] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951155, 'name': CreateVM_Task, 'duration_secs': 0.430543} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.369462] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1863.370159] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.370453] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.371470] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1863.371470] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8293340-e36a-4e78-a707-a778eefd2d90 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.376521] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1863.376521] env[63024]: value = 
"session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52612378-e035-8e61-c230-6c9ac3d17101" [ 1863.376521] env[63024]: _type = "Task" [ 1863.376521] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.385915] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52612378-e035-8e61-c230-6c9ac3d17101, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1863.508371] env[63024]: DEBUG nova.network.neutron [-] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1863.532017] env[63024]: DEBUG oslo_vmware.api [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951152, 'name': PowerOnVM_Task, 'duration_secs': 0.613604} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.532017] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1863.532017] env[63024]: INFO nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Took 7.52 seconds to spawn the instance on the hypervisor. [ 1863.532017] env[63024]: DEBUG nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1863.532017] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2e33f95-bd9a-4feb-afac-9fa0d4fc0fdc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.616633] env[63024]: DEBUG nova.network.neutron [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1863.668895] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance c28e7c21-7e7d-4cda-81e8-63538bd8a1f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1863.868196] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-90fa869f-087f-41cb-92b7-536971390021 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.882016] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb41c280-81a9-4a75-8cd9-be9005694055 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.904582] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52612378-e035-8e61-c230-6c9ac3d17101, 'name': SearchDatastore_Task, 'duration_secs': 0.014703} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1863.905311] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1863.905827] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1863.906346] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.906745] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1863.908187] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1863.908187] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b0f51e0d-ead1-4884-856e-e356b296333f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.935300] env[63024]: DEBUG nova.compute.manager [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service 
nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Detach interface failed, port_id=1ec87f7b-43ea-4f5e-b378-3919f7767904, reason: Instance e0a37f54-14ca-4eea-a9b3-6e652ca1e48d could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1863.935300] env[63024]: DEBUG nova.compute.manager [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Received event network-changed-9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1863.935300] env[63024]: DEBUG nova.compute.manager [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Refreshing instance network info cache due to event network-changed-9241a9a2-d60f-4c1f-a092-5caa7b738112. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1863.935300] env[63024]: DEBUG oslo_concurrency.lockutils [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] Acquiring lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1863.947023] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1863.947023] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1863.947023] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6581f39d-bc63-48fc-9882-892524f1c0da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1863.956835] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1863.956835] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5249b25d-dcac-82e1-6aa1-8d01ed32a862" [ 1863.956835] env[63024]: _type = "Task" [ 1863.956835] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1863.969615] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5249b25d-dcac-82e1-6aa1-8d01ed32a862, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.013654] env[63024]: INFO nova.compute.manager [-] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Took 2.48 seconds to deallocate network for instance. 
[ 1864.050781] env[63024]: INFO nova.compute.manager [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Took 38.43 seconds to build instance. [ 1864.111380] env[63024]: DEBUG nova.network.neutron [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updating instance_info_cache with network_info: [{"id": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "address": "fa:16:3e:9e:4f:a9", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d713c35-a0", "ovs_interfaceid": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1864.175024] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 56d220f3-b97c-4cbe-b582-c4a4f1171472 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1864.175024] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 26 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1864.175024] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=5632MB phys_disk=200GB used_disk=26GB total_vcpus=48 used_vcpus=26 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1864.179669] env[63024]: DEBUG nova.network.neutron [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updated VIF entry in instance network info cache for port 9241a9a2-d60f-4c1f-a092-5caa7b738112. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1864.180211] env[63024]: DEBUG nova.network.neutron [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updating instance_info_cache with network_info: [{"id": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "address": "fa:16:3e:2a:d5:7a", "network": {"id": "ce29ceef-bd62-4366-81a9-4c99d66ac178", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-898882035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.236", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4c262cc280074a0bb3b8967f2cbb7c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9241a9a2-d6", "ovs_interfaceid": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1864.296971] env[63024]: DEBUG nova.network.neutron [req-b821d630-2d74-4fc5-b828-f5b971c40910 req-0532c74a-0c55-4db9-8a55-3e41b3be9b68 service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Updated VIF entry in instance network info cache for port 7cedcfa3-1f00-4ebd-88b4-207d64c14235. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1864.297532] env[63024]: DEBUG nova.network.neutron [req-b821d630-2d74-4fc5-b828-f5b971c40910 req-0532c74a-0c55-4db9-8a55-3e41b3be9b68 service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Updating instance_info_cache with network_info: [{"id": "7cedcfa3-1f00-4ebd-88b4-207d64c14235", "address": "fa:16:3e:9c:8b:1f", "network": {"id": "384d05e3-ef53-40f3-8a75-21f850df070c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-411167579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e138433d59374418952a186a4d2a0f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cedcfa3-1f", "ovs_interfaceid": "7cedcfa3-1f00-4ebd-88b4-207d64c14235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1864.471431] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5249b25d-dcac-82e1-6aa1-8d01ed32a862, 'name': SearchDatastore_Task, 'duration_secs': 0.01623} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.475013] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6492606b-3fd5-4718-9d9f-5c6854a24d58 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.481774] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1864.481774] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5289a4bc-9b46-0e3f-9b74-fe939d6a2d78" [ 1864.481774] env[63024]: _type = "Task" [ 1864.481774] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.494332] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5289a4bc-9b46-0e3f-9b74-fe939d6a2d78, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.519411] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1864.553197] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a26e679-ab3a-4567-91fc-9797aa218d02 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "e1be531c-e849-42ac-8319-5bd453a7a562" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.449s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1864.616080] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.616424] env[63024]: DEBUG nova.compute.manager [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Instance network_info: |[{"id": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "address": "fa:16:3e:9e:4f:a9", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d713c35-a0", "ovs_interfaceid": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1864.616849] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:4f:a9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a64108f9-df0a-4feb-bbb5-97f5841c356c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1864.625008] env[63024]: DEBUG oslo.service.loopingcall [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1864.628229] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1864.629624] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6f4d337-b082-4ec2-945d-999e456845d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.654926] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1864.654926] env[63024]: value = "task-1951156" [ 1864.654926] env[63024]: _type = "Task" [ 1864.654926] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1864.664314] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951156, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1864.683877] env[63024]: DEBUG oslo_concurrency.lockutils [req-56c55ca0-b204-429a-b3b6-7bba9f1ad75c req-3bf818d5-e12c-4a20-93c7-8cdf8af5e6be service nova] Releasing lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.684490] env[63024]: DEBUG oslo_concurrency.lockutils [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] Acquired lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1864.684800] env[63024]: DEBUG nova.network.neutron [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Refreshing network info cache for port 9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1864.768569] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9f88dc-df17-422a-870c-d11c62c0ef7e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.778547] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55bb481-7a2a-4a29-96e8-c0ec9962df70 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.814137] env[63024]: DEBUG oslo_concurrency.lockutils [req-b821d630-2d74-4fc5-b828-f5b971c40910 req-0532c74a-0c55-4db9-8a55-3e41b3be9b68 service nova] Releasing lock "refresh_cache-77c27741-ee3a-4a8b-bbd3-89759288f7c6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.815324] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ffeca1-b5f9-4adf-875a-2628094d107a {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.824284] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-731832fc-8fed-4981-83e4-1db216c01de5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1864.839047] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1864.996892] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5289a4bc-9b46-0e3f-9b74-fe939d6a2d78, 'name': SearchDatastore_Task, 'duration_secs': 0.013648} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1864.997144] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1864.997372] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 77c27741-ee3a-4a8b-bbd3-89759288f7c6/77c27741-ee3a-4a8b-bbd3-89759288f7c6.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1864.997649] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b37858c8-a6b3-4f19-b9a6-c413d3e361ec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1865.005321] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1865.005321] env[63024]: value = "task-1951157" [ 1865.005321] env[63024]: _type = "Task" [ 1865.005321] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1865.015349] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951157, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.165376] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951156, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.342051] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1865.375354] env[63024]: DEBUG nova.network.neutron [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updated VIF entry in instance network info cache for port 9241a9a2-d60f-4c1f-a092-5caa7b738112. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1865.375713] env[63024]: DEBUG nova.network.neutron [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updating instance_info_cache with network_info: [{"id": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "address": "fa:16:3e:2a:d5:7a", "network": {"id": "ce29ceef-bd62-4366-81a9-4c99d66ac178", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-898882035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4c262cc280074a0bb3b8967f2cbb7c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9241a9a2-d6", "ovs_interfaceid": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.518788] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951157, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.675553] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951156, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1865.705236] env[63024]: DEBUG nova.compute.manager [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Received event network-vif-plugged-7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1865.705592] env[63024]: DEBUG oslo_concurrency.lockutils [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] Acquiring lock "01b8072a-4483-4932-8294-7e5b48e6b203-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1865.705695] env[63024]: DEBUG oslo_concurrency.lockutils [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] Lock "01b8072a-4483-4932-8294-7e5b48e6b203-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.705990] env[63024]: DEBUG oslo_concurrency.lockutils [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] Lock "01b8072a-4483-4932-8294-7e5b48e6b203-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.706105] env[63024]: DEBUG nova.compute.manager [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] No waiting events found dispatching network-vif-plugged-7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1865.706274] env[63024]: WARNING nova.compute.manager [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Received unexpected event network-vif-plugged-7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 for instance with vm_state building and task_state spawning. [ 1865.706443] env[63024]: DEBUG nova.compute.manager [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Received event network-changed-7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1865.706558] env[63024]: DEBUG nova.compute.manager [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Refreshing instance network info cache due to event network-changed-7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1865.706743] env[63024]: DEBUG oslo_concurrency.lockutils [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] Acquiring lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.706873] env[63024]: DEBUG oslo_concurrency.lockutils [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] Acquired lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.707047] env[63024]: DEBUG nova.network.neutron [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Refreshing network info cache for port 7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1865.730586] env[63024]: DEBUG nova.compute.manager [req-f134fd9c-f98b-47ad-947f-8912cba7da43 req-30d5bb77-ecd9-482b-92cb-386bda33cac0 service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Received event network-changed-9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1865.730749] env[63024]: DEBUG nova.compute.manager [req-f134fd9c-f98b-47ad-947f-8912cba7da43 req-30d5bb77-ecd9-482b-92cb-386bda33cac0 service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Refreshing instance network info cache due to event network-changed-9241a9a2-d60f-4c1f-a092-5caa7b738112. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1865.731483] env[63024]: DEBUG oslo_concurrency.lockutils [req-f134fd9c-f98b-47ad-947f-8912cba7da43 req-30d5bb77-ecd9-482b-92cb-386bda33cac0 service nova] Acquiring lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1865.847408] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1865.848887] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 5.796s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.848887] env[63024]: DEBUG oslo_concurrency.lockutils [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.581s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.848887] env[63024]: DEBUG oslo_concurrency.lockutils [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1865.850750] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.247s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1865.852059] env[63024]: INFO nova.compute.claims [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1865.855169] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1865.855334] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Cleaning up deleted instances {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11558}} [ 1865.879167] env[63024]: DEBUG oslo_concurrency.lockutils [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] Releasing lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1865.879167] env[63024]: DEBUG nova.compute.manager [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Received event network-vif-deleted-dcca960a-05bc-4b0e-b542-36ec3c6c3543 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1865.879167] env[63024]: INFO nova.compute.manager [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Neutron deleted interface dcca960a-05bc-4b0e-b542-36ec3c6c3543; detaching it from the instance and deleting it from the info cache [ 1865.879380] env[63024]: DEBUG nova.network.neutron [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1865.880993] env[63024]: DEBUG oslo_concurrency.lockutils [req-f134fd9c-f98b-47ad-947f-8912cba7da43 req-30d5bb77-ecd9-482b-92cb-386bda33cac0 service nova] Acquired lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1865.881380] env[63024]: DEBUG nova.network.neutron [req-f134fd9c-f98b-47ad-947f-8912cba7da43 req-30d5bb77-ecd9-482b-92cb-386bda33cac0 service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Refreshing network info cache for port 9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1865.990251] env[63024]: INFO 
nova.scheduler.client.report [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Deleted allocations for instance 650a97b9-911e-44b0-9e82-a6d4cc95c9dd [ 1866.018832] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951157, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.789665} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.019189] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 77c27741-ee3a-4a8b-bbd3-89759288f7c6/77c27741-ee3a-4a8b-bbd3-89759288f7c6.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1866.019387] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1866.019631] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0cdc3114-e40c-46ad-9b02-04cf4670bbac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.027558] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1866.027558] env[63024]: value = "task-1951158" [ 1866.027558] env[63024]: _type = "Task" [ 1866.027558] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.038017] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951158, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.094289] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "1709d916-d0c4-4706-b41b-8b0ed25f3331" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.094596] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.094790] env[63024]: DEBUG nova.compute.manager [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Going to confirm migration 3 {{(pid=63024) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5113}} [ 1866.170384] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951156, 'name': CreateVM_Task, 'duration_secs': 1.365271} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.170561] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1866.171423] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.171594] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.171948] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1866.172251] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4dd73d6-cb8e-4964-8cbe-3eec439d0ebf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.178595] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 
tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1866.178595] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5240441a-436b-9fe1-9b29-a5e438647847" [ 1866.178595] env[63024]: _type = "Task" [ 1866.178595] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.252576] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5240441a-436b-9fe1-9b29-a5e438647847, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.299873] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "52c17abc-78f0-417b-8675-e8d62bc8baa3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.300136] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "52c17abc-78f0-417b-8675-e8d62bc8baa3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.300341] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "52c17abc-78f0-417b-8675-e8d62bc8baa3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.300523] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "52c17abc-78f0-417b-8675-e8d62bc8baa3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.300689] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "52c17abc-78f0-417b-8675-e8d62bc8baa3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.302794] env[63024]: INFO nova.compute.manager [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Terminating instance [ 1866.369168] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] 
There are 41 instances to clean {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11567}} [ 1866.369466] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 81f96b5a-b878-4e6c-9683-00528a4d5650] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1866.384974] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-605c18d1-e571-44c6-b1d5-62469338e04e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.397356] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d440cb24-9579-4c4f-9ce1-0f77dde9729a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.445693] env[63024]: DEBUG nova.compute.manager [req-64f63567-fb2e-4263-aea1-980db5be75f4 req-bb4a48f8-72e2-495b-9d9c-1508ab955f04 service nova] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Detach interface failed, port_id=dcca960a-05bc-4b0e-b542-36ec3c6c3543, reason: Instance e0a37f54-14ca-4eea-a9b3-6e652ca1e48d could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1866.464082] env[63024]: DEBUG nova.network.neutron [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updated VIF entry in instance network info cache for port 7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1866.464432] env[63024]: DEBUG nova.network.neutron [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updating instance_info_cache with network_info: [{"id": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "address": "fa:16:3e:9e:4f:a9", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d713c35-a0", "ovs_interfaceid": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.499925] env[63024]: DEBUG oslo_concurrency.lockutils [None req-966c1bab-5142-4c8f-95bf-1ae15db2acaa tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "650a97b9-911e-44b0-9e82-a6d4cc95c9dd" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.222s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.541087] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951158, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.174566} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.541394] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1866.544278] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-821ee317-1571-4b18-9151-90b01086b363 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.567823] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 77c27741-ee3a-4a8b-bbd3-89759288f7c6/77c27741-ee3a-4a8b-bbd3-89759288f7c6.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1866.572114] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97b7060d-1d62-4b8e-9f50-ff03c890df2d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.593976] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1866.593976] env[63024]: value = "task-1951159" [ 1866.593976] env[63024]: _type = "Task" [ 1866.593976] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.609163] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951159, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.622320] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "e1be531c-e849-42ac-8319-5bd453a7a562" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.622636] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "e1be531c-e849-42ac-8319-5bd453a7a562" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.622843] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "e1be531c-e849-42ac-8319-5bd453a7a562-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.623028] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "e1be531c-e849-42ac-8319-5bd453a7a562-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.623738] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "e1be531c-e849-42ac-8319-5bd453a7a562-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.626028] env[63024]: INFO nova.compute.manager [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Terminating instance [ 1866.646424] env[63024]: DEBUG nova.network.neutron [req-f134fd9c-f98b-47ad-947f-8912cba7da43 req-30d5bb77-ecd9-482b-92cb-386bda33cac0 service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updated VIF entry in instance network info cache for port 9241a9a2-d60f-4c1f-a092-5caa7b738112. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1866.646902] env[63024]: DEBUG nova.network.neutron [req-f134fd9c-f98b-47ad-947f-8912cba7da43 req-30d5bb77-ecd9-482b-92cb-386bda33cac0 service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updating instance_info_cache with network_info: [{"id": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "address": "fa:16:3e:2a:d5:7a", "network": {"id": "ce29ceef-bd62-4366-81a9-4c99d66ac178", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-898882035-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4c262cc280074a0bb3b8967f2cbb7c73", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec3f9e71-839a-429d-b211-d3dfc98ca4f6", "external-id": "nsx-vlan-transportzone-5", "segmentation_id": 5, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9241a9a2-d6", "ovs_interfaceid": "9241a9a2-d60f-4c1f-a092-5caa7b738112", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1866.688759] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5240441a-436b-9fe1-9b29-a5e438647847, 'name': SearchDatastore_Task, 'duration_secs': 0.017547} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1866.691612] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.691671] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1866.691872] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.692569] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.692569] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1866.693248] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1866.693451] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1866.693578] env[63024]: DEBUG nova.network.neutron [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1866.693744] env[63024]: DEBUG nova.objects.instance [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lazy-loading 
'info_cache' on Instance uuid 1709d916-d0c4-4706-b41b-8b0ed25f3331 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1866.695671] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-deb2d62b-b869-48e7-b402-5ccdb5c5fa14 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.698030] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquiring lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.698272] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.699034] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquiring lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1866.699034] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1866.699034] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1866.700588] env[63024]: INFO nova.compute.manager [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Terminating instance [ 1866.718049] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1866.718253] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 
tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1866.721302] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9534af9a-eb9c-4381-8e5c-73e4b7352b3f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.727367] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1866.727367] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522a545d-609c-e875-b208-2c6fd0e6a89a" [ 1866.727367] env[63024]: _type = "Task" [ 1866.727367] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.737767] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522a545d-609c-e875-b208-2c6fd0e6a89a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.806785] env[63024]: DEBUG nova.compute.manager [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1866.806994] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1866.807931] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef86ea6-4712-416a-8436-569e2959dbda {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.817489] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1866.817795] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f4105d7-8bba-4e70-b870-2b2a47b3f53f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.829229] env[63024]: DEBUG oslo_vmware.api [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1866.829229] env[63024]: value = "task-1951160" [ 1866.829229] env[63024]: _type = "Task" [ 1866.829229] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1866.839196] env[63024]: DEBUG oslo_vmware.api [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951160, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1866.878731] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 6156ce17-3f29-487a-afc5-2fa0fb7f114c] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1866.949011] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a590b98-9298-440f-a54f-1622fdc9ff1f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.958811] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1daa8680-42dc-4653-9b15-32b368230d42 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1866.995568] env[63024]: DEBUG oslo_concurrency.lockutils [req-1e09be8d-8b42-4736-994f-29891e21c10f req-6b2d2b06-a78f-4e17-8b85-ac36118bc6d8 service nova] Releasing lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1866.997433] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667dfb52-2280-49b9-8e78-34bddaf781ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.007029] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6240432-9cdf-4bca-a672-114aafa72958 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.024658] env[63024]: DEBUG nova.compute.provider_tree [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1867.105810] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951159, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.135027] env[63024]: DEBUG nova.compute.manager [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1867.135027] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1867.135027] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7c243bb-d797-4df4-a154-9275b9add50c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.143022] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1867.143022] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dd3b2321-9c6d-4d79-bc2e-385731b6a253 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.151022] env[63024]: DEBUG oslo_concurrency.lockutils [req-f134fd9c-f98b-47ad-947f-8912cba7da43 req-30d5bb77-ecd9-482b-92cb-386bda33cac0 service nova] Releasing lock "refresh_cache-1448c924-7c61-4c43-a4e7-5a6dd45375cc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.151022] env[63024]: DEBUG oslo_vmware.api [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1867.151022] env[63024]: value = "task-1951161" [ 1867.151022] env[63024]: _type = "Task" [ 1867.151022] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.164857] env[63024]: DEBUG oslo_vmware.api [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951161, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.204082] env[63024]: DEBUG nova.compute.manager [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1867.204445] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1867.206054] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76c9a00e-c9b9-4fe3-9aea-257cbe7f31fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.216612] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1867.216909] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-454ce2e5-f2b0-4e10-a721-f273625a8d19 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.225294] env[63024]: DEBUG oslo_vmware.api [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1867.225294] env[63024]: value = "task-1951162" [ 1867.225294] env[63024]: _type = "Task" [ 1867.225294] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.245455] env[63024]: DEBUG oslo_vmware.api [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951162, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.251637] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522a545d-609c-e875-b208-2c6fd0e6a89a, 'name': SearchDatastore_Task, 'duration_secs': 0.088251} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.253018] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41a31a58-32c1-4610-99b7-9985206a965a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.262537] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1867.262537] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52346ee7-c1ac-53c0-a9e4-0b508cbc7d3c" [ 1867.262537] env[63024]: _type = "Task" [ 1867.262537] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.274844] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52346ee7-c1ac-53c0-a9e4-0b508cbc7d3c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.341342] env[63024]: DEBUG oslo_vmware.api [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951160, 'name': PowerOffVM_Task, 'duration_secs': 0.445177} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.341729] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1867.341970] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1867.342749] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-59def135-79d4-4127-b44d-b0f5ec41bb90 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.386861] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 02db92ec-3377-406b-a95c-0022579fa75b] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1867.529152] env[63024]: DEBUG nova.scheduler.client.report [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1867.606603] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951159, 'name': ReconfigVM_Task, 'duration_secs': 0.809572} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.606918] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 77c27741-ee3a-4a8b-bbd3-89759288f7c6/77c27741-ee3a-4a8b-bbd3-89759288f7c6.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1867.608229] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fb1e2e4c-343e-4f97-b0c0-689c57f482ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.615207] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1867.615207] env[63024]: value = "task-1951164" [ 1867.615207] env[63024]: _type = "Task" [ 1867.615207] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.624125] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951164, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.642392] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1867.642662] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1867.642916] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Deleting the datastore file [datastore1] 52c17abc-78f0-417b-8675-e8d62bc8baa3 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1867.643312] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f9ed53b6-3384-4c27-82e3-bec341c626b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.650926] env[63024]: DEBUG oslo_vmware.api [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1867.650926] env[63024]: value = "task-1951165" [ 1867.650926] env[63024]: _type = "Task" [ 1867.650926] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.665616] env[63024]: DEBUG oslo_vmware.api [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951161, 'name': PowerOffVM_Task, 'duration_secs': 0.231971} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.668757] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1867.668994] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1867.669682] env[63024]: DEBUG oslo_vmware.api [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951165, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.669857] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19cc3054-c341-48ba-b90f-4311bf441eba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.740691] env[63024]: DEBUG oslo_vmware.api [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951162, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.773512] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52346ee7-c1ac-53c0-a9e4-0b508cbc7d3c, 'name': SearchDatastore_Task, 'duration_secs': 0.017375} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1867.773810] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1867.774102] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 01b8072a-4483-4932-8294-7e5b48e6b203/01b8072a-4483-4932-8294-7e5b48e6b203.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1867.774404] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58312cf5-1a2a-4385-9c4f-4f36dad52aa5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.785370] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1867.785370] env[63024]: value = "task-1951167" [ 1867.785370] env[63024]: _type = "Task" [ 1867.785370] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.791597] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1867.791812] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1867.791985] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Deleting the datastore file [datastore1] e1be531c-e849-42ac-8319-5bd453a7a562 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1867.792312] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7de5ab2-9f58-4f21-8a39-6730b688f29a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1867.797214] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951167, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.805252] env[63024]: DEBUG oslo_vmware.api [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1867.805252] env[63024]: value = "task-1951168" [ 1867.805252] env[63024]: _type = "Task" [ 1867.805252] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1867.814789] env[63024]: DEBUG oslo_vmware.api [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951168, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1867.892902] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 2dd20650-9273-432a-be28-73ccb66c721d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1867.942109] env[63024]: DEBUG nova.network.neutron [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance_info_cache with network_info: [{"id": "611e1e79-ffb8-4ba9-8718-b57360eaa492", "address": "fa:16:3e:f8:21:2c", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap611e1e79-ff", "ovs_interfaceid": "611e1e79-ffb8-4ba9-8718-b57360eaa492", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.036311] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.036597] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.036806] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1868.036997] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.037271] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.039890] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.190s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1868.040480] env[63024]: DEBUG nova.compute.manager [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1868.044036] env[63024]: INFO nova.compute.manager [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Terminating instance [ 1868.047054] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.765s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1868.047462] env[63024]: INFO nova.compute.claims [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1868.132884] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951164, 'name': Rename_Task, 'duration_secs': 0.172867} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.132884] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1868.132884] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6855d69-0461-42e3-8b33-8b2735542e24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.141285] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1868.141285] env[63024]: value = "task-1951169" [ 1868.141285] env[63024]: _type = "Task" [ 1868.141285] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.154265] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951169, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.168852] env[63024]: DEBUG oslo_vmware.api [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951165, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181665} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.169139] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1868.169344] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1868.169522] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1868.169693] env[63024]: INFO nova.compute.manager [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Took 1.36 seconds to destroy the instance on the hypervisor. [ 1868.169939] env[63024]: DEBUG oslo.service.loopingcall [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1868.170146] env[63024]: DEBUG nova.compute.manager [-] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1868.170242] env[63024]: DEBUG nova.network.neutron [-] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1868.238771] env[63024]: DEBUG oslo_vmware.api [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951162, 'name': PowerOffVM_Task, 'duration_secs': 0.60076} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.241411] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1868.241620] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1868.241883] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ecd38090-25ab-4c86-9cd3-5d691faf9016 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.297975] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951167, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.315953] env[63024]: DEBUG oslo_vmware.api [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951168, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.162515} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.316397] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1868.316477] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1868.316584] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1868.316758] env[63024]: INFO nova.compute.manager [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Took 1.18 seconds to destroy the instance on the hypervisor. 
[ 1868.316991] env[63024]: DEBUG oslo.service.loopingcall [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1868.317331] env[63024]: DEBUG nova.compute.manager [-] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1868.317421] env[63024]: DEBUG nova.network.neutron [-] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1868.394535] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 5c2efe96-4ac4-4693-9203-43407d768f66] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1868.431462] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1868.431811] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1868.432107] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Deleting the datastore file [datastore1] 1448c924-7c61-4c43-a4e7-5a6dd45375cc {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1868.432532] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1b20c09-b727-4918-9d83-c11bc261f90e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.444032] env[63024]: DEBUG oslo_vmware.api [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for the task: (returnval){ [ 1868.444032] env[63024]: value = "task-1951171" [ 1868.444032] env[63024]: _type = "Task" [ 1868.444032] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.446826] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "refresh_cache-1709d916-d0c4-4706-b41b-8b0ed25f3331" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1868.446826] env[63024]: DEBUG nova.objects.instance [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lazy-loading 'migration_context' on Instance uuid 1709d916-d0c4-4706-b41b-8b0ed25f3331 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1868.460596] env[63024]: DEBUG oslo_vmware.api [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951171, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.480489] env[63024]: DEBUG nova.compute.manager [req-d0dcd8da-ddfd-4746-9847-534092e84cca req-84f67c21-695b-4e6f-a582-772359b0a213 service nova] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Received event network-vif-deleted-f660bb87-9d24-492b-adaf-d1471c95249a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1868.480489] env[63024]: INFO nova.compute.manager [req-d0dcd8da-ddfd-4746-9847-534092e84cca req-84f67c21-695b-4e6f-a582-772359b0a213 service nova] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Neutron deleted interface f660bb87-9d24-492b-adaf-d1471c95249a; detaching it from the instance and deleting it from the info cache [ 1868.480489] env[63024]: DEBUG nova.network.neutron [req-d0dcd8da-ddfd-4746-9847-534092e84cca req-84f67c21-695b-4e6f-a582-772359b0a213 service nova] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.546775] env[63024]: DEBUG nova.compute.utils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1868.549151] env[63024]: DEBUG nova.compute.manager [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1868.549411] env[63024]: DEBUG nova.network.neutron [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1868.556682] env[63024]: DEBUG nova.compute.manager [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1868.557215] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1868.558764] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac7ba18e-f183-44b6-9a14-7821fe2f1664 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.567996] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1868.568260] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9d7bacf5-80f9-4847-aa42-34c284441c0e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.576361] env[63024]: DEBUG oslo_vmware.api [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1868.576361] env[63024]: value = "task-1951172" [ 1868.576361] env[63024]: _type = "Task" [ 1868.576361] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.590030] env[63024]: DEBUG oslo_vmware.api [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951172, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.598483] env[63024]: DEBUG nova.policy [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fc84a6eed984429b26a693ce7b0876e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9521048e807c4ca2a6d2e74a72b829a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1868.658920] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951169, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.809219] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951167, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552013} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.809565] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 01b8072a-4483-4932-8294-7e5b48e6b203/01b8072a-4483-4932-8294-7e5b48e6b203.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1868.809760] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1868.810319] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5f12da11-1c53-4bca-847b-b3c19b90c74b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.817994] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1868.817994] env[63024]: value = "task-1951173" [ 1868.817994] env[63024]: _type = "Task" [ 1868.817994] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.828719] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951173, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1868.870408] env[63024]: DEBUG nova.compute.manager [req-19931458-89ca-41de-8b6d-a467ec91e828 req-4fb50d5e-dd39-4f91-8d7e-239b5a1ba541 service nova] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Received event network-vif-deleted-accf6d7d-a9ad-4eb7-bdee-7937675b2370 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1868.870615] env[63024]: INFO nova.compute.manager [req-19931458-89ca-41de-8b6d-a467ec91e828 req-4fb50d5e-dd39-4f91-8d7e-239b5a1ba541 service nova] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Neutron deleted interface accf6d7d-a9ad-4eb7-bdee-7937675b2370; detaching it from the instance and deleting it from the info cache [ 1868.870788] env[63024]: DEBUG nova.network.neutron [req-19931458-89ca-41de-8b6d-a467ec91e828 req-4fb50d5e-dd39-4f91-8d7e-239b5a1ba541 service nova] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.897577] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 3815d381-760d-40fc-98cf-8e6af287007f] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1868.916623] env[63024]: DEBUG nova.network.neutron [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Successfully created port: 989997b7-12bd-4924-97e2-a65914c47536 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1868.955478] env[63024]: DEBUG nova.objects.base [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Object Instance<1709d916-d0c4-4706-b41b-8b0ed25f3331> lazy-loaded attributes: info_cache,migration_context {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1868.955805] env[63024]: DEBUG nova.network.neutron [-] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1868.956900] env[63024]: DEBUG oslo_vmware.api [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Task: {'id': task-1951171, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256119} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1868.957957] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f19946-791f-4f57-aae9-e9c3790b4eb3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.961023] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1868.961023] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1868.961207] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1868.961419] env[63024]: INFO nova.compute.manager [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Took 1.76 seconds to destroy the instance on the hypervisor. [ 1868.961651] env[63024]: DEBUG oslo.service.loopingcall [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1868.961842] env[63024]: DEBUG nova.compute.manager [-] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1868.961932] env[63024]: DEBUG nova.network.neutron [-] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1868.980272] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62cbe414-f799-45b8-b3aa-2694a089685c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.983249] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-446c6e24-049e-4d1e-af33-429f0e107ea2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1868.991178] env[63024]: DEBUG oslo_vmware.api [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1868.991178] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524602fa-d6f8-2180-208d-d28fe95b370f" [ 1868.991178] env[63024]: _type = "Task" [ 1868.991178] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1868.997238] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6b6bbe-fab5-4b73-8dd6-5b19925d174c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.010476] env[63024]: DEBUG oslo_vmware.api [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524602fa-d6f8-2180-208d-d28fe95b370f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.039775] env[63024]: DEBUG nova.compute.manager [req-d0dcd8da-ddfd-4746-9847-534092e84cca req-84f67c21-695b-4e6f-a582-772359b0a213 service nova] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Detach interface failed, port_id=f660bb87-9d24-492b-adaf-d1471c95249a, reason: Instance 52c17abc-78f0-417b-8675-e8d62bc8baa3 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1869.050017] env[63024]: DEBUG nova.compute.manager [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1869.086883] env[63024]: DEBUG oslo_vmware.api [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951172, 'name': PowerOffVM_Task, 'duration_secs': 0.250774} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.086883] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1869.087026] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1869.087296] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3708e6e8-0ada-41c2-8e4c-db1a4ce1710a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.159129] env[63024]: DEBUG oslo_vmware.api [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951169, 'name': PowerOnVM_Task, 'duration_secs': 0.606375} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.161484] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1869.161700] env[63024]: INFO nova.compute.manager [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Took 9.19 seconds to spawn the instance on the hypervisor. 
[ 1869.161878] env[63024]: DEBUG nova.compute.manager [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1869.162856] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52f9d64-ede7-497b-a142-f9e5980d0045 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.210212] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1869.210553] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1869.210553] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Deleting the datastore file [datastore1] f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1869.214310] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26f4a8fd-2617-4a6a-8780-1d3b616821b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.218294] env[63024]: DEBUG oslo_vmware.api [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for the task: (returnval){ [ 1869.218294] env[63024]: value = "task-1951175" [ 1869.218294] env[63024]: _type = "Task" [ 1869.218294] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.238028] env[63024]: DEBUG oslo_vmware.api [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951175, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.266555] env[63024]: DEBUG nova.network.neutron [-] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.328376] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951173, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.222296} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.331066] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1869.332125] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7831bc3-b384-463b-8d9c-80483638a717 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.355018] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 01b8072a-4483-4932-8294-7e5b48e6b203/01b8072a-4483-4932-8294-7e5b48e6b203.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1869.357813] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48b05cb1-1ee8-44bf-9cb7-950b008778a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.374670] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99f37321-2f33-4c21-a61b-395e0bea9714 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.378193] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1869.378193] env[63024]: value = "task-1951176" [ 1869.378193] env[63024]: _type = "Task" [ 1869.378193] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.387583] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f970165-4902-4b05-901e-729278443f96 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.402895] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 8edc24d6-9073-4836-b14b-422df3ac1b88] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1869.404768] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951176, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.438066] env[63024]: DEBUG nova.compute.manager [req-19931458-89ca-41de-8b6d-a467ec91e828 req-4fb50d5e-dd39-4f91-8d7e-239b5a1ba541 service nova] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Detach interface failed, port_id=accf6d7d-a9ad-4eb7-bdee-7937675b2370, reason: Instance e1be531c-e849-42ac-8319-5bd453a7a562 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1869.458615] env[63024]: INFO nova.compute.manager [-] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Took 1.29 seconds to deallocate network for instance. [ 1869.501446] env[63024]: DEBUG oslo_vmware.api [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524602fa-d6f8-2180-208d-d28fe95b370f, 'name': SearchDatastore_Task, 'duration_secs': 0.009626} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.504028] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1869.655259] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599854cb-a43e-4496-a221-2f1bd7395f08 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.664947] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0d445db-06fc-4b75-ae1d-ca977aec7d0a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.703801] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8799d160-6268-47aa-8f98-1b3614f62b83 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.709162] env[63024]: INFO nova.compute.manager [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Took 42.90 seconds to build instance. 
[ 1869.714704] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9314bfc-444a-448d-bac3-2e21a5b87dc2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.729450] env[63024]: DEBUG nova.compute.provider_tree [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1869.740861] env[63024]: DEBUG oslo_vmware.api [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Task: {'id': task-1951175, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.475278} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.741107] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1869.741310] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1869.741496] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1869.741660] env[63024]: INFO nova.compute.manager [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Took 1.18 seconds to destroy the instance on the hypervisor. [ 1869.741889] env[63024]: DEBUG oslo.service.loopingcall [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1869.742084] env[63024]: DEBUG nova.compute.manager [-] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1869.742180] env[63024]: DEBUG nova.network.neutron [-] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1869.746351] env[63024]: DEBUG nova.network.neutron [-] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1869.769405] env[63024]: INFO nova.compute.manager [-] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Took 1.45 seconds to deallocate network for instance. [ 1869.890949] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951176, 'name': ReconfigVM_Task, 'duration_secs': 0.340025} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1869.891377] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 01b8072a-4483-4932-8294-7e5b48e6b203/01b8072a-4483-4932-8294-7e5b48e6b203.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1869.891939] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-320a0676-4d8c-4da9-b51e-7538aa2c01ed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1869.899891] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1869.899891] env[63024]: value = "task-1951177" [ 1869.899891] env[63024]: _type = "Task" [ 1869.899891] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1869.909636] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 49eb6292-012a-4296-aff8-9c460866a602] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1869.911516] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951177, 'name': Rename_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1869.968120] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.060650] env[63024]: DEBUG nova.compute.manager [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1870.097571] env[63024]: DEBUG nova.virt.hardware [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1870.097571] env[63024]: DEBUG nova.virt.hardware [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1870.097571] env[63024]: DEBUG nova.virt.hardware [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1870.097571] env[63024]: DEBUG nova.virt.hardware [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1870.097571] env[63024]: DEBUG nova.virt.hardware [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1870.097881] env[63024]: DEBUG nova.virt.hardware [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1870.098269] env[63024]: DEBUG nova.virt.hardware [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1870.098269] env[63024]: DEBUG nova.virt.hardware [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1870.098269] env[63024]: DEBUG nova.virt.hardware [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1870.098507] env[63024]: DEBUG nova.virt.hardware [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1870.098570] env[63024]: DEBUG nova.virt.hardware [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1870.099505] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b055b255-3137-474a-b76f-0c334370d115 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.110408] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b532b45a-71b0-4530-b8ce-10a5fa4af805 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.211565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a3f3848f-7054-489c-9e69-e5ef6c211447 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 68.880s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.248968] env[63024]: INFO nova.compute.manager [-] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Took 1.29 seconds to deallocate network for instance. 
[ 1870.277256] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.278614] env[63024]: DEBUG nova.scheduler.client.report [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 108 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1870.279542] env[63024]: DEBUG nova.compute.provider_tree [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 108 to 109 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1870.279542] env[63024]: DEBUG nova.compute.provider_tree [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1870.412589] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951177, 'name': Rename_Task, 'duration_secs': 0.244571} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1870.413258] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: f6fddc23-ad36-4d6f-82a2-ded456b2596e] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1870.415081] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1870.415570] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-816658d1-56ea-4bea-985a-67998aab2662 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1870.425120] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1870.425120] env[63024]: value = "task-1951178" [ 1870.425120] env[63024]: _type = "Task" [ 1870.425120] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1870.437550] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951178, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.460667] env[63024]: DEBUG nova.network.neutron [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Successfully updated port: 989997b7-12bd-4924-97e2-a65914c47536 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1870.510197] env[63024]: DEBUG nova.compute.manager [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received event network-vif-plugged-989997b7-12bd-4924-97e2-a65914c47536 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1870.510436] env[63024]: DEBUG oslo_concurrency.lockutils [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] Acquiring lock "9a7f4452-ae50-4779-8474-11d3a6d3533f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.510808] env[63024]: DEBUG oslo_concurrency.lockutils [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.510983] env[63024]: DEBUG oslo_concurrency.lockutils [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e 
req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.511197] env[63024]: DEBUG nova.compute.manager [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] No waiting events found dispatching network-vif-plugged-989997b7-12bd-4924-97e2-a65914c47536 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1870.511397] env[63024]: WARNING nova.compute.manager [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received unexpected event network-vif-plugged-989997b7-12bd-4924-97e2-a65914c47536 for instance with vm_state building and task_state spawning. [ 1870.511581] env[63024]: DEBUG nova.compute.manager [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received event network-changed-989997b7-12bd-4924-97e2-a65914c47536 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1870.511730] env[63024]: DEBUG nova.compute.manager [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Refreshing instance network info cache due to event network-changed-989997b7-12bd-4924-97e2-a65914c47536. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1870.511921] env[63024]: DEBUG oslo_concurrency.lockutils [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] Acquiring lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.512089] env[63024]: DEBUG oslo_concurrency.lockutils [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] Acquired lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.512236] env[63024]: DEBUG nova.network.neutron [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Refreshing network info cache for port 989997b7-12bd-4924-97e2-a65914c47536 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1870.605915] env[63024]: DEBUG nova.network.neutron [-] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1870.755743] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1870.784513] env[63024]: DEBUG oslo_concurrency.lockutils [None 
req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.739s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1870.785165] env[63024]: DEBUG nova.compute.manager [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1870.788520] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.100s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1870.789361] env[63024]: INFO nova.compute.claims [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1870.916497] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9679a1a2-b003-4a60-a812-8b3a9b5f545f] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1870.936298] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951178, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1870.944348] env[63024]: DEBUG nova.compute.manager [req-982657fb-e88d-4c62-bf1e-885820cafbda req-039ff67d-0837-493a-9e87-2cabd037753e service nova] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Received event network-vif-deleted-9241a9a2-d60f-4c1f-a092-5caa7b738112 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1870.944548] env[63024]: DEBUG nova.compute.manager [req-982657fb-e88d-4c62-bf1e-885820cafbda req-039ff67d-0837-493a-9e87-2cabd037753e service nova] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Received event network-vif-deleted-90fdf5d2-f22d-4b1f-9b65-a0975e5c1cd3 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1870.944713] env[63024]: DEBUG nova.compute.manager [req-982657fb-e88d-4c62-bf1e-885820cafbda req-039ff67d-0837-493a-9e87-2cabd037753e service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Received event network-changed-7cedcfa3-1f00-4ebd-88b4-207d64c14235 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1870.944867] env[63024]: DEBUG nova.compute.manager [req-982657fb-e88d-4c62-bf1e-885820cafbda req-039ff67d-0837-493a-9e87-2cabd037753e service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Refreshing instance network info cache due to event network-changed-7cedcfa3-1f00-4ebd-88b4-207d64c14235. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1870.945093] env[63024]: DEBUG oslo_concurrency.lockutils [req-982657fb-e88d-4c62-bf1e-885820cafbda req-039ff67d-0837-493a-9e87-2cabd037753e service nova] Acquiring lock "refresh_cache-77c27741-ee3a-4a8b-bbd3-89759288f7c6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1870.945239] env[63024]: DEBUG oslo_concurrency.lockutils [req-982657fb-e88d-4c62-bf1e-885820cafbda req-039ff67d-0837-493a-9e87-2cabd037753e service nova] Acquired lock "refresh_cache-77c27741-ee3a-4a8b-bbd3-89759288f7c6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1870.945408] env[63024]: DEBUG nova.network.neutron [req-982657fb-e88d-4c62-bf1e-885820cafbda req-039ff67d-0837-493a-9e87-2cabd037753e service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Refreshing network info cache for port 7cedcfa3-1f00-4ebd-88b4-207d64c14235 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1870.963774] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1871.049722] env[63024]: DEBUG nova.network.neutron [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1871.109410] env[63024]: INFO nova.compute.manager [-] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Took 1.37 seconds to deallocate network for instance. [ 1871.135074] env[63024]: DEBUG nova.network.neutron [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1871.296855] env[63024]: DEBUG nova.compute.utils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1871.298291] env[63024]: DEBUG nova.compute.manager [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1871.298462] env[63024]: DEBUG nova.network.neutron [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1871.342872] env[63024]: DEBUG nova.policy [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28fab1e92c1d4491986100983f6b4ab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6072e8931d9540ad8fe4a2b4b1ec782d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1871.421989] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: cc5cfa6d-d3db-4997-8413-2460e1124f02] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1871.437894] env[63024]: DEBUG oslo_vmware.api [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951178, 'name': PowerOnVM_Task, 'duration_secs': 0.966825} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1871.437894] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1871.437894] env[63024]: INFO nova.compute.manager [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Took 9.84 seconds to spawn the instance on the hypervisor. 
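The PowerOnVM_Task entries above trace the generic task-wait pattern these oslo_vmware log lines come from: a task is started, its progress is reported while it runs (66% here), and on completion the elapsed time is recorded ('duration_secs': 0.966825). Below is a minimal, self-contained sketch of that polling loop in plain Python. It is only an approximation of what the driver's task wait does, not the real oslo_vmware API; get_task_info and TaskInfo are hypothetical stand-ins for the vSphere task query the driver performs through PropertyCollector.RetrievePropertiesEx.

import time
from dataclasses import dataclass

# Hypothetical stand-in for the vSphere TaskInfo object the driver reads
# back from vCenter while a task is in flight.
@dataclass
class TaskInfo:
    state: str               # "queued", "running", "success" or "error"
    progress: int = 0        # 0-100, as logged while the task is running
    error: str | None = None

def poll_task(get_task_info, task_id, interval=0.5):
    """Poll a task until it finishes, mirroring the progress/duration logging above.

    get_task_info is an assumed callable returning a TaskInfo for task_id;
    in the real driver this lookup goes through the vSphere API session.
    """
    started = time.monotonic()
    while True:
        info = get_task_info(task_id)
        if info.state in ("queued", "running"):
            print("Task: {'id': %r} progress is %d%%." % (task_id, info.progress))
            time.sleep(interval)
            continue
        duration = time.monotonic() - started
        if info.state == "success":
            print("Task: {'id': %r, 'duration_secs': %f} completed successfully."
                  % (task_id, duration))
            return duration
        raise RuntimeError("Task %r failed: %s" % (task_id, info.error))

Feeding this a fake get_task_info that reports "running" a couple of times and then "success" reproduces the shape of the task-1951178 lines above; the real driver additionally layers retries and error translation on top of this loop.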
[ 1871.437894] env[63024]: DEBUG nova.compute.manager [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1871.438825] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbbe9cb-7636-4f6f-a4a0-737afde0620d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1871.617639] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1871.642395] env[63024]: DEBUG oslo_concurrency.lockutils [req-2bd030b8-7910-4c5a-920b-aa52d5b4381e req-7109d163-49df-4bb9-a9b3-561190ba081c service nova] Releasing lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1871.642796] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1871.642982] env[63024]: DEBUG nova.network.neutron [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1871.749593] env[63024]: DEBUG nova.network.neutron [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Successfully created port: c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1871.802420] env[63024]: DEBUG nova.compute.manager [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1871.832754] env[63024]: DEBUG nova.network.neutron [req-982657fb-e88d-4c62-bf1e-885820cafbda req-039ff67d-0837-493a-9e87-2cabd037753e service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Updated VIF entry in instance network info cache for port 7cedcfa3-1f00-4ebd-88b4-207d64c14235. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1871.833111] env[63024]: DEBUG nova.network.neutron [req-982657fb-e88d-4c62-bf1e-885820cafbda req-039ff67d-0837-493a-9e87-2cabd037753e service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Updating instance_info_cache with network_info: [{"id": "7cedcfa3-1f00-4ebd-88b4-207d64c14235", "address": "fa:16:3e:9c:8b:1f", "network": {"id": "384d05e3-ef53-40f3-8a75-21f850df070c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-411167579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e138433d59374418952a186a4d2a0f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cedcfa3-1f", "ovs_interfaceid": "7cedcfa3-1f00-4ebd-88b4-207d64c14235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1871.925597] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 28b3bfc7-2bed-4941-9f48-8bd301e1a971] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1871.963500] env[63024]: INFO nova.compute.manager [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Took 41.93 seconds to build instance. [ 1872.191430] env[63024]: DEBUG nova.network.neutron [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1872.341973] env[63024]: DEBUG oslo_concurrency.lockutils [req-982657fb-e88d-4c62-bf1e-885820cafbda req-039ff67d-0837-493a-9e87-2cabd037753e service nova] Releasing lock "refresh_cache-77c27741-ee3a-4a8b-bbd3-89759288f7c6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.362984] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a989e9b-b4ca-43fb-bf0c-ae95291b046c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.371800] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8edae16-ea9c-40c7-aeb0-fd33bb39554e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.411420] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02c0af1a-7aa1-4cb3-aaf5-b6079669a9cc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.420967] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d645ba-9f36-4815-bf38-66bd78642428 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.435914] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 94d9210e-ca8d-4ef1-a640-2d9a11ad87d3] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1872.438604] env[63024]: DEBUG nova.compute.provider_tree [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1872.466427] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a198e7ba-cdb6-4c3d-88bd-8520e0ae5498 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "01b8072a-4483-4932-8294-7e5b48e6b203" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.415s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1872.474439] env[63024]: DEBUG nova.network.neutron [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Updating instance_info_cache with network_info: [{"id": "989997b7-12bd-4924-97e2-a65914c47536", "address": "fa:16:3e:3b:9f:01", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, 
"tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989997b7-12", "ovs_interfaceid": "989997b7-12bd-4924-97e2-a65914c47536", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1872.618063] env[63024]: DEBUG nova.compute.manager [req-c2a08c9a-02a7-4ef5-a7f8-b4e81e5b9ff3 req-5e8d0c61-a22d-4a2a-a66d-40ed1ec976ef service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Received event network-changed-7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1872.618432] env[63024]: DEBUG nova.compute.manager [req-c2a08c9a-02a7-4ef5-a7f8-b4e81e5b9ff3 req-5e8d0c61-a22d-4a2a-a66d-40ed1ec976ef service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Refreshing instance network info cache due to event network-changed-7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1872.618957] env[63024]: DEBUG oslo_concurrency.lockutils [req-c2a08c9a-02a7-4ef5-a7f8-b4e81e5b9ff3 req-5e8d0c61-a22d-4a2a-a66d-40ed1ec976ef service nova] Acquiring lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1872.619559] env[63024]: DEBUG oslo_concurrency.lockutils [req-c2a08c9a-02a7-4ef5-a7f8-b4e81e5b9ff3 req-5e8d0c61-a22d-4a2a-a66d-40ed1ec976ef service nova] Acquired lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1872.619923] env[63024]: DEBUG nova.network.neutron [req-c2a08c9a-02a7-4ef5-a7f8-b4e81e5b9ff3 req-5e8d0c61-a22d-4a2a-a66d-40ed1ec976ef service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Refreshing network info cache for port 7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1872.816194] env[63024]: DEBUG nova.compute.manager [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1872.843239] env[63024]: DEBUG nova.virt.hardware [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1872.843494] env[63024]: DEBUG nova.virt.hardware [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1872.843669] env[63024]: DEBUG nova.virt.hardware [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1872.843865] env[63024]: DEBUG nova.virt.hardware [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1872.844025] env[63024]: DEBUG nova.virt.hardware [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1872.845080] env[63024]: DEBUG nova.virt.hardware [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1872.845080] env[63024]: DEBUG nova.virt.hardware [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1872.845080] env[63024]: DEBUG nova.virt.hardware [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1872.846253] env[63024]: DEBUG nova.virt.hardware [None 
req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1872.846492] env[63024]: DEBUG nova.virt.hardware [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1872.847029] env[63024]: DEBUG nova.virt.hardware [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1872.850072] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9c7fb9c-4dc4-480f-b3ac-c45b1145b0d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.858184] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c88e0f-3656-4198-a08e-c4b0e7e44a40 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1872.940144] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 18444b47-476a-4ca3-9a4f-0dc58e652143] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1872.942631] env[63024]: DEBUG nova.scheduler.client.report [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1872.977402] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1872.977757] env[63024]: DEBUG nova.compute.manager [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Instance network_info: |[{"id": "989997b7-12bd-4924-97e2-a65914c47536", "address": "fa:16:3e:3b:9f:01", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, 
"ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989997b7-12", "ovs_interfaceid": "989997b7-12bd-4924-97e2-a65914c47536", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1872.978170] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:9f:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '989997b7-12bd-4924-97e2-a65914c47536', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1872.985909] env[63024]: DEBUG oslo.service.loopingcall [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1872.986408] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1872.986643] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c09de95-170c-47c9-9e0d-7c322067c098 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.009357] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1873.009357] env[63024]: value = "task-1951179" [ 1873.009357] env[63024]: _type = "Task" [ 1873.009357] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.018284] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951179, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.025213] env[63024]: DEBUG nova.compute.manager [req-397af1d8-34b1-4078-8531-29acbab785a1 req-1a7f2ad7-bea3-468b-ba53-9649ec6de834 service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Received event network-changed-7cedcfa3-1f00-4ebd-88b4-207d64c14235 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1873.025409] env[63024]: DEBUG nova.compute.manager [req-397af1d8-34b1-4078-8531-29acbab785a1 req-1a7f2ad7-bea3-468b-ba53-9649ec6de834 service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Refreshing instance network info cache due to event network-changed-7cedcfa3-1f00-4ebd-88b4-207d64c14235. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1873.025620] env[63024]: DEBUG oslo_concurrency.lockutils [req-397af1d8-34b1-4078-8531-29acbab785a1 req-1a7f2ad7-bea3-468b-ba53-9649ec6de834 service nova] Acquiring lock "refresh_cache-77c27741-ee3a-4a8b-bbd3-89759288f7c6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.025756] env[63024]: DEBUG oslo_concurrency.lockutils [req-397af1d8-34b1-4078-8531-29acbab785a1 req-1a7f2ad7-bea3-468b-ba53-9649ec6de834 service nova] Acquired lock "refresh_cache-77c27741-ee3a-4a8b-bbd3-89759288f7c6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1873.025907] env[63024]: DEBUG nova.network.neutron [req-397af1d8-34b1-4078-8531-29acbab785a1 req-1a7f2ad7-bea3-468b-ba53-9649ec6de834 service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Refreshing network info cache for port 7cedcfa3-1f00-4ebd-88b4-207d64c14235 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1873.301548] env[63024]: DEBUG nova.network.neutron [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Successfully updated port: c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1873.447277] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 8a826350-0fee-409d-a3fc-260d7d43bdf6] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1873.450144] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.662s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1873.450642] env[63024]: DEBUG nova.compute.manager [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1873.454161] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.655s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1873.455841] env[63024]: INFO nova.compute.claims [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1873.514973] env[63024]: DEBUG nova.network.neutron [req-c2a08c9a-02a7-4ef5-a7f8-b4e81e5b9ff3 req-5e8d0c61-a22d-4a2a-a66d-40ed1ec976ef service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updated VIF entry in instance network info cache for port 7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1873.515355] env[63024]: DEBUG nova.network.neutron [req-c2a08c9a-02a7-4ef5-a7f8-b4e81e5b9ff3 req-5e8d0c61-a22d-4a2a-a66d-40ed1ec976ef service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updating instance_info_cache with network_info: [{"id": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "address": "fa:16:3e:9e:4f:a9", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d713c35-a0", "ovs_interfaceid": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1873.522915] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951179, 'name': CreateVM_Task, 'duration_secs': 0.374281} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1873.523466] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1873.524215] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.524381] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1873.524731] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1873.524989] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e2ffc46-e4b6-4997-80d1-6c31a6a7e9ef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1873.532391] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1873.532391] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526d06b9-41b7-1d80-842a-e487fc5009ba" [ 1873.532391] env[63024]: _type = "Task" [ 1873.532391] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1873.543318] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526d06b9-41b7-1d80-842a-e487fc5009ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1873.808375] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-0f371c69-c7ae-4649-b038-be82e8ca74e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1873.808534] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-0f371c69-c7ae-4649-b038-be82e8ca74e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1873.808684] env[63024]: DEBUG nova.network.neutron [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1873.954694] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 610dd030-5080-498a-8744-b1411297d70d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1873.961392] env[63024]: DEBUG nova.compute.utils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1873.966991] env[63024]: DEBUG nova.compute.manager [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1873.967182] env[63024]: DEBUG nova.network.neutron [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1874.013798] env[63024]: DEBUG nova.policy [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4a59445f732e4801b5e6020b488adb59', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5577b40f56af44eebd47761192e9510f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1874.018011] env[63024]: DEBUG oslo_concurrency.lockutils [req-c2a08c9a-02a7-4ef5-a7f8-b4e81e5b9ff3 req-5e8d0c61-a22d-4a2a-a66d-40ed1ec976ef service nova] Releasing lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1874.046032] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526d06b9-41b7-1d80-842a-e487fc5009ba, 'name': SearchDatastore_Task, 'duration_secs': 0.011327} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.046032] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1874.046211] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1874.046465] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1874.046618] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1874.046807] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1874.047457] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83e8d57b-d407-4215-93d6-79e7963d86a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.059008] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1874.059259] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1874.060395] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bae0420b-d6ad-4330-815f-12f01b8862cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.067540] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1874.067540] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52118f7c-df3d-2f43-165d-8190d67fe2ae" [ 1874.067540] env[63024]: _type = "Task" [ 1874.067540] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.083052] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52118f7c-df3d-2f43-165d-8190d67fe2ae, 'name': SearchDatastore_Task, 'duration_secs': 0.011582} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.083965] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ea71b05-33d9-417b-8a65-9585e82bf5ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.090687] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1874.090687] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528652db-d18f-43ed-90c2-9f897059e615" [ 1874.090687] env[63024]: _type = "Task" [ 1874.090687] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.103023] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528652db-d18f-43ed-90c2-9f897059e615, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.133080] env[63024]: DEBUG nova.network.neutron [req-397af1d8-34b1-4078-8531-29acbab785a1 req-1a7f2ad7-bea3-468b-ba53-9649ec6de834 service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Updated VIF entry in instance network info cache for port 7cedcfa3-1f00-4ebd-88b4-207d64c14235. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1874.133617] env[63024]: DEBUG nova.network.neutron [req-397af1d8-34b1-4078-8531-29acbab785a1 req-1a7f2ad7-bea3-468b-ba53-9649ec6de834 service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Updating instance_info_cache with network_info: [{"id": "7cedcfa3-1f00-4ebd-88b4-207d64c14235", "address": "fa:16:3e:9c:8b:1f", "network": {"id": "384d05e3-ef53-40f3-8a75-21f850df070c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-411167579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e138433d59374418952a186a4d2a0f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7cedcfa3-1f", "ovs_interfaceid": "7cedcfa3-1f00-4ebd-88b4-207d64c14235", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.351151] env[63024]: DEBUG nova.network.neutron [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1874.353734] env[63024]: DEBUG nova.network.neutron [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Successfully created port: 360ebc0c-7601-4e8c-87a5-65b79b2ae569 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1874.458541] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 1ad97ed0-2a84-4783-8511-e0f6b24861bd] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1874.468295] env[63024]: DEBUG nova.compute.manager [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1874.585869] env[63024]: DEBUG nova.network.neutron [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Updating instance_info_cache with network_info: [{"id": "c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c", "address": "fa:16:3e:a7:20:a6", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc48bb2e4-b1", "ovs_interfaceid": "c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1874.604244] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528652db-d18f-43ed-90c2-9f897059e615, 'name': SearchDatastore_Task, 'duration_secs': 0.010884} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1874.606387] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1874.606751] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9a7f4452-ae50-4779-8474-11d3a6d3533f/9a7f4452-ae50-4779-8474-11d3a6d3533f.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1874.607692] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f50789b4-60ee-46fa-b7de-5c59e8c1dc53 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1874.615836] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1874.615836] env[63024]: value = "task-1951180" [ 1874.615836] env[63024]: _type = "Task" [ 1874.615836] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1874.626042] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951180, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1874.636132] env[63024]: DEBUG oslo_concurrency.lockutils [req-397af1d8-34b1-4078-8531-29acbab785a1 req-1a7f2ad7-bea3-468b-ba53-9649ec6de834 service nova] Releasing lock "refresh_cache-77c27741-ee3a-4a8b-bbd3-89759288f7c6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1874.645295] env[63024]: DEBUG nova.compute.manager [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Received event network-vif-plugged-c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1874.645520] env[63024]: DEBUG oslo_concurrency.lockutils [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] Acquiring lock "0f371c69-c7ae-4649-b038-be82e8ca74e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1874.645705] env[63024]: DEBUG oslo_concurrency.lockutils [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] Lock "0f371c69-c7ae-4649-b038-be82e8ca74e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1874.645903] env[63024]: DEBUG oslo_concurrency.lockutils [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] Lock "0f371c69-c7ae-4649-b038-be82e8ca74e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1874.646051] env[63024]: DEBUG nova.compute.manager [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] No waiting events found dispatching network-vif-plugged-c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1874.646216] env[63024]: WARNING nova.compute.manager [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Received unexpected event network-vif-plugged-c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c for instance with vm_state building and task_state spawning. [ 1874.646373] env[63024]: DEBUG nova.compute.manager [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Received event network-changed-c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1874.646523] env[63024]: DEBUG nova.compute.manager [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Refreshing instance network info cache due to event network-changed-c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1874.646684] env[63024]: DEBUG oslo_concurrency.lockutils [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] Acquiring lock "refresh_cache-0f371c69-c7ae-4649-b038-be82e8ca74e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1874.962246] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 00e925a1-9b79-46e2-b7f7-c0b63e1e72df] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1874.987206] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28a32f9b-0c6c-4915-bd93-9e53db0fedae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.003248] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f46876b6-5b2f-4782-aa07-8457ddd44f7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.038804] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22751c37-902b-426f-90a7-b970f0ccaf09 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.050918] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6f3cea8-9c43-4134-aa8d-5dbed4eb4b12 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.069779] env[63024]: DEBUG nova.compute.provider_tree [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1875.091421] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-0f371c69-c7ae-4649-b038-be82e8ca74e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1875.091580] env[63024]: DEBUG nova.compute.manager [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Instance network_info: |[{"id": "c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c", "address": "fa:16:3e:a7:20:a6", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc48bb2e4-b1", "ovs_interfaceid": "c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1875.091892] env[63024]: DEBUG oslo_concurrency.lockutils [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] Acquired lock "refresh_cache-0f371c69-c7ae-4649-b038-be82e8ca74e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1875.092082] env[63024]: DEBUG nova.network.neutron [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Refreshing network info cache for port c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1875.093325] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:20:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3ccbdbb-8b49-4a26-913f-2a448b72280f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1875.100725] env[63024]: DEBUG oslo.service.loopingcall [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1875.101465] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1875.102156] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2c8a54f-84b9-4dc9-a8f4-8542d2d197f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.125943] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951180, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.127175] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1875.127175] env[63024]: value = "task-1951181" [ 1875.127175] env[63024]: _type = "Task" [ 1875.127175] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.134900] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951181, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.409338] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "43cdc362-588f-42cc-a4b2-a08fe60293a5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1875.409599] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "43cdc362-588f-42cc-a4b2-a08fe60293a5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1875.465780] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: ca1e1e82-f4fe-42b5-92b1-6a20b7267d4d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1875.480994] env[63024]: DEBUG nova.compute.manager [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1875.513033] env[63024]: DEBUG nova.virt.hardware [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1875.513033] env[63024]: DEBUG nova.virt.hardware [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1875.513033] env[63024]: DEBUG nova.virt.hardware [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1875.513033] env[63024]: DEBUG nova.virt.hardware [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 
tempest-ServersAdminTestJSON-1761531644-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1875.513299] env[63024]: DEBUG nova.virt.hardware [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1875.513299] env[63024]: DEBUG nova.virt.hardware [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1875.513446] env[63024]: DEBUG nova.virt.hardware [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1875.513556] env[63024]: DEBUG nova.virt.hardware [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1875.513724] env[63024]: DEBUG nova.virt.hardware [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1875.513884] env[63024]: DEBUG nova.virt.hardware [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1875.514063] env[63024]: DEBUG nova.virt.hardware [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1875.514909] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2105cfd-084a-4aeb-81e1-d2f4e8ca3122 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.524049] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22b4256-1524-433c-9308-69913650875b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.572938] env[63024]: DEBUG nova.scheduler.client.report [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1875.628535] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951180, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.571058} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1875.631506] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9a7f4452-ae50-4779-8474-11d3a6d3533f/9a7f4452-ae50-4779-8474-11d3a6d3533f.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1875.631730] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1875.632023] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-caec65c8-b8a8-4026-9958-1bbcd4a03ba3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1875.639812] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951181, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.640949] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1875.640949] env[63024]: value = "task-1951182" [ 1875.640949] env[63024]: _type = "Task" [ 1875.640949] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1875.653236] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951182, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1875.823974] env[63024]: DEBUG nova.network.neutron [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Updated VIF entry in instance network info cache for port c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c. 
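The hardware.py lines above walk from flavor and image limits (all zero, i.e. unset) to a single possible topology of 1 socket x 1 core x 1 thread for the 1-vCPU m1.nano flavor. Below is a toy version of that enumeration, assuming unset limits simply mean "no constraint"; it is not the real _get_desirable_cpu_topologies.

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) splits whose product equals vcpus
    and which respect the per-dimension limits (65536 is effectively unlimited)."""
    topologies = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if sockets * cores * threads != vcpus:
            continue
        if sockets <= max_sockets and cores <= max_cores and threads <= max_threads:
            topologies.append((sockets, cores, threads))
    return topologies

# 1 vCPU -> exactly one topology, matching
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"
print(possible_topologies(1))   # [(1, 1, 1)]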
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1875.824688] env[63024]: DEBUG nova.network.neutron [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Updating instance_info_cache with network_info: [{"id": "c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c", "address": "fa:16:3e:a7:20:a6", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc48bb2e4-b1", "ovs_interfaceid": "c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1875.908361] env[63024]: DEBUG nova.network.neutron [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Successfully updated port: 360ebc0c-7601-4e8c-87a5-65b79b2ae569 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1875.913820] env[63024]: DEBUG nova.compute.manager [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1875.970117] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 85d6db13-d317-498e-a36a-972e9b36e82b] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1876.078425] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.624s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.078963] env[63024]: DEBUG nova.compute.manager [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Start building networks asynchronously for instance. 
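The scheduler report line above carries the provider inventory (totals, reserved amounts, allocation ratios) that placement uses for capacity checks. A small worked example of the usual overcommit arithmetic, (total - reserved) * allocation_ratio, using the exact numbers from this log; the helper name is ours, not the report client's.

# Inventory payload copied from the report above.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def effective_capacity(entry):
    """Schedulable amount: (total - reserved) scaled by the allocation ratio."""
    return (entry["total"] - entry["reserved"]) * entry["allocation_ratio"]

for rc, entry in inventory.items():
    print(rc, effective_capacity(entry))
# VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0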
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1876.081554] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.382s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.081747] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.084638] env[63024]: DEBUG oslo_concurrency.lockutils [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.494s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.084817] env[63024]: DEBUG oslo_concurrency.lockutils [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.086734] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.544s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.088357] env[63024]: INFO nova.compute.claims [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1876.112838] env[63024]: INFO nova.scheduler.client.report [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Deleted allocations for instance c12774e4-77d1-4001-8d5d-0240dfed4ead [ 1876.114613] env[63024]: INFO nova.scheduler.client.report [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleted allocations for instance 9bf1316e-f1ae-426e-a0a2-d814a2460c4d [ 1876.138825] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951181, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.153389] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951182, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069779} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1876.153697] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1876.154611] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd702650-b95f-4e4d-b9f3-1f5feb0a1dbf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.180158] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Reconfiguring VM instance instance-00000049 to attach disk [datastore1] 9a7f4452-ae50-4779-8474-11d3a6d3533f/9a7f4452-ae50-4779-8474-11d3a6d3533f.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1876.180780] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ccba04e4-9a62-4e08-8076-3c88053c9ea7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1876.201910] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1876.201910] env[63024]: value = "task-1951183" [ 1876.201910] env[63024]: _type = "Task" [ 1876.201910] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1876.211666] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951183, 'name': ReconfigVM_Task} progress is 5%. 
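Most of the oslo_vmware.api lines in this trace are the same polling loop: submit a vCenter task (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, CreateVM_Task), then poll its progress until it completes or errors. A generic, stdlib-only sketch of such a loop; get_task_info is a stand-in callable, not the oslo.vmware session API.

import time

RUNNING, SUCCESS, ERROR = "running", "success", "error"   # illustrative task states

def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll a task until it finishes, printing progress the way _poll_task logs it."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress = get_task_info()
        print(f"Task progress is {progress}%.")
        if state == SUCCESS:
            return
        if state == ERROR:
            raise RuntimeError("task failed")
        time.sleep(poll_interval)
    raise TimeoutError(f"task did not complete within {timeout}s")

# Fake task that completes after three polls: 0% -> 25% -> done.
states = iter([(RUNNING, 0), (RUNNING, 25), (SUCCESS, 100)])
wait_for_task(lambda: next(states), poll_interval=0)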
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.328719] env[63024]: DEBUG oslo_concurrency.lockutils [req-ce97ced1-4df5-4cdd-8b7c-ba9e26033686 req-a8b87e8b-424e-4ea8-bf39-d1f4d8514ef8 service nova] Releasing lock "refresh_cache-0f371c69-c7ae-4649-b038-be82e8ca74e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1876.413583] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "refresh_cache-9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.413760] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "refresh_cache-9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1876.413914] env[63024]: DEBUG nova.network.neutron [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1876.442268] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.471941] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: e3c9e9de-586d-4baa-b4bb-95c41d527a03] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1876.593968] env[63024]: DEBUG nova.compute.utils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1876.597845] env[63024]: DEBUG nova.compute.manager [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1876.598012] env[63024]: DEBUG nova.network.neutron [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1876.624575] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4c52a643-4c61-4c52-aa67-3bf3dd9d7ba4 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "c12774e4-77d1-4001-8d5d-0240dfed4ead" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.469s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.625669] env[63024]: DEBUG oslo_concurrency.lockutils [None req-805b97dd-a2f3-4df4-a933-7ad58df3b2fd tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "9bf1316e-f1ae-426e-a0a2-d814a2460c4d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.632s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.646660] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951181, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.656039] env[63024]: DEBUG nova.policy [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e68362e6da947cc996661188e7629f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6f87eadd82394447910efa7b71814e97', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1876.713601] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951183, 'name': ReconfigVM_Task} progress is 14%. 
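The nova.policy line above records a failed network:attach_external_network check for a token carrying only the member and reader roles, so the port is created without the external-network privilege. A toy role-based check in the same spirit, assuming (as is the usual default) that the rule requires the admin role; real Nova evaluates oslo.policy rules, not this helper.

# Illustrative rule table: rule name -> roles that satisfy it (assumed, not Nova's policy file).
RULES = {
    "network:attach_external_network": {"admin"},
}

def check_policy(rule, credentials):
    """Return True if any of the caller's roles satisfies the rule."""
    required = RULES.get(rule, set())
    return bool(required & set(credentials.get("roles", [])))

creds = {"roles": ["member", "reader"],
         "project_id": "6f87eadd82394447910efa7b71814e97"}
print(check_policy("network:attach_external_network", creds))   # False, as in the log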
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1876.810632] env[63024]: DEBUG nova.compute.manager [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Received event network-vif-plugged-360ebc0c-7601-4e8c-87a5-65b79b2ae569 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1876.810725] env[63024]: DEBUG oslo_concurrency.lockutils [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] Acquiring lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.811767] env[63024]: DEBUG oslo_concurrency.lockutils [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] Lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.811767] env[63024]: DEBUG oslo_concurrency.lockutils [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] Lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.811767] env[63024]: DEBUG nova.compute.manager [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] No waiting events found dispatching network-vif-plugged-360ebc0c-7601-4e8c-87a5-65b79b2ae569 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1876.811767] env[63024]: WARNING nova.compute.manager [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Received unexpected event network-vif-plugged-360ebc0c-7601-4e8c-87a5-65b79b2ae569 for instance with vm_state building and task_state spawning. [ 1876.811767] env[63024]: DEBUG nova.compute.manager [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Received event network-changed-360ebc0c-7601-4e8c-87a5-65b79b2ae569 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1876.811918] env[63024]: DEBUG nova.compute.manager [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Refreshing instance network info cache due to event network-changed-360ebc0c-7601-4e8c-87a5-65b79b2ae569. 
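Each "Acquiring/Acquired/Releasing lock refresh_cache-<uuid>" sequence in this trace brackets a rebuild of one instance's cached network_info: the handler serializes on the per-instance lock, fetches fresh port data, and writes it back to the cache. A compact sketch of that pattern with stdlib locks; fetch_from_neutron and the module-level dicts are illustrative, not Nova's cache implementation.

import threading

_cache_locks = {}     # instance uuid -> lock guarding its cached network_info
_nw_info_cache = {}   # instance uuid -> list of VIF dicts

def refresh_network_info(instance_uuid, fetch_from_neutron):
    """Rebuild one instance's cached network_info under its refresh_cache lock,
    mirroring the Acquiring/Acquired/Releasing "refresh_cache-<uuid>" lines."""
    lock = _cache_locks.setdefault(instance_uuid, threading.Lock())
    with lock:                                    # "Acquired lock refresh_cache-<uuid>"
        _nw_info_cache[instance_uuid] = fetch_from_neutron(instance_uuid)
    return _nw_info_cache[instance_uuid]          # lock released on exiting the with-block

# fetch_from_neutron is a stand-in; here it returns a single port like the cache entries above.
info = refresh_network_info(
    "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc",
    lambda uuid: [{"id": "360ebc0c-7601-4e8c-87a5-65b79b2ae569", "active": True}],
)
print(info)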
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1876.812554] env[63024]: DEBUG oslo_concurrency.lockutils [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] Acquiring lock "refresh_cache-9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1876.848048] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "37792b57-3347-4134-a060-53359afa3298" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.848048] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "37792b57-3347-4134-a060-53359afa3298" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.848048] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "37792b57-3347-4134-a060-53359afa3298-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1876.848240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "37792b57-3347-4134-a060-53359afa3298-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1876.848407] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "37792b57-3347-4134-a060-53359afa3298-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1876.855501] env[63024]: INFO nova.compute.manager [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Terminating instance [ 1876.963421] env[63024]: DEBUG nova.network.neutron [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Successfully created port: 736ac7e2-7a3c-429f-ad94-557967750b24 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1876.976197] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: d49eae54-cccb-4281-aaa0-d6974529eb7b] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11571}} [ 1876.981541] env[63024]: DEBUG nova.network.neutron [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1877.098696] env[63024]: DEBUG nova.compute.manager [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1877.154165] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951181, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.216718] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951183, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.269114] env[63024]: DEBUG nova.network.neutron [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Updating instance_info_cache with network_info: [{"id": "360ebc0c-7601-4e8c-87a5-65b79b2ae569", "address": "fa:16:3e:6b:9f:39", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap360ebc0c-76", "ovs_interfaceid": "360ebc0c-7601-4e8c-87a5-65b79b2ae569", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1877.362534] env[63024]: DEBUG nova.compute.manager [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Start destroying the instance on the hypervisor. 
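The recurring "Instance has had 0 of 5 cleanup attempts" lines come from the periodic pending-deletes task, which retries leftover cleanup a bounded number of times per instance before giving up. A compact sketch of that bounded-retry pattern; the limit of 5 matches the log, the rest of the structure is illustrative.

MAX_ATTEMPTS = 5

def run_pending_deletes(instances, cleanup):
    """Try to finish cleanup for each soft-deleted instance, giving up after
    MAX_ATTEMPTS failed attempts (as the '0 of 5 cleanup attempts' lines imply)."""
    for inst in instances:
        attempts = inst.setdefault("cleanup_attempts", 0)
        if attempts >= MAX_ATTEMPTS:
            continue                              # stop retrying this instance
        print(f"Instance has had {attempts} of {MAX_ATTEMPTS} cleanup attempts")
        try:
            cleanup(inst)
            inst["cleaned"] = True
        except Exception:
            inst["cleanup_attempts"] = attempts + 1   # retry on the next periodic run

instances = [{"uuid": "00e925a1-9b79-46e2-b7f7-c0b63e1e72df"}]
run_pending_deletes(instances, cleanup=lambda inst: None)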
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1877.362759] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1877.365073] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e69892-926b-4e3b-9d2b-1999096253b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.373975] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1877.376982] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "7cf0ac90-d87d-4644-8a88-da5328d1721d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.377252] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "7cf0ac90-d87d-4644-8a88-da5328d1721d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.377523] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "7cf0ac90-d87d-4644-8a88-da5328d1721d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.377645] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "7cf0ac90-d87d-4644-8a88-da5328d1721d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.377812] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "7cf0ac90-d87d-4644-8a88-da5328d1721d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1877.379382] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f4678e2-b32a-4f99-a6f8-c376435179df {{(pid=63024) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.381262] env[63024]: INFO nova.compute.manager [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Terminating instance [ 1877.390023] env[63024]: DEBUG oslo_vmware.api [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1877.390023] env[63024]: value = "task-1951184" [ 1877.390023] env[63024]: _type = "Task" [ 1877.390023] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.405089] env[63024]: DEBUG oslo_vmware.api [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951184, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.411533] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "a0a9ea07-dda8-45b4-bab9-cdaf683c0a21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1877.411778] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "a0a9ea07-dda8-45b4-bab9-cdaf683c0a21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1877.479735] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9cf45c3a-2a74-4f8e-8817-47bbd748a44b] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1877.649590] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951181, 'name': CreateVM_Task, 'duration_secs': 2.121356} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.649769] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1877.650478] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1877.650704] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.651366] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1877.656035] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea07a33f-6b26-47f7-8702-719f8eb0d12d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.661069] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1877.661069] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5238920b-3ca9-6f39-a2ea-f38f19d36e2d" [ 1877.661069] env[63024]: _type = "Task" [ 1877.661069] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.670440] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5238920b-3ca9-6f39-a2ea-f38f19d36e2d, 'name': SearchDatastore_Task} progress is 0%. 
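The lock on "[datastore1] devstack-image-cache_base/2646ca61-..." followed by SearchDatastore_Task above is the image-cache handshake: serialize on a per-image lock, check whether the cached base disk already exists in the datastore, and only fetch it if missing. A stdlib-only sketch of that check-then-fetch pattern; ensure_cached_image and the fake datastore set are illustrative, not the driver's code.

import threading

_image_locks = {}   # cache path -> lock serializing fetch/copy of that image
_datastore = set()  # pretend datastore contents (paths)

def ensure_cached_image(cache_path, fetch_image):
    """Serialize on a per-image lock, look for the cached copy, fetch only if missing."""
    lock = _image_locks.setdefault(cache_path, threading.Lock())
    with lock:
        if cache_path in _datastore:          # SearchDatastore found it: reuse the cache
            return cache_path
        fetch_image(cache_path)               # download/copy the base image into the cache
        _datastore.add(cache_path)
        return cache_path

path = "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835"
ensure_cached_image(path, fetch_image=lambda p: print("fetching", p))
ensure_cached_image(path, fetch_image=lambda p: print("fetching", p))  # second call is a cache hit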
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.674154] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd31fd1e-58dc-446b-9390-32dcf6e1c90e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.687527] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468e8b6e-204c-4161-90c3-b58684b4afc2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.721283] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2168b9d-02c5-4d8b-8150-e30f78aefbb6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.730161] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951183, 'name': ReconfigVM_Task, 'duration_secs': 1.411944} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.732250] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Reconfigured VM instance instance-00000049 to attach disk [datastore1] 9a7f4452-ae50-4779-8474-11d3a6d3533f/9a7f4452-ae50-4779-8474-11d3a6d3533f.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1877.732908] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1899582c-0d70-4f5c-89fd-489734e60060 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.735348] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-176feadd-4835-4bb0-9f5e-fc9b8b3246fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.751802] env[63024]: DEBUG nova.compute.provider_tree [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1877.756265] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1877.756265] env[63024]: value = "task-1951185" [ 1877.756265] env[63024]: _type = "Task" [ 1877.756265] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.766550] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951185, 'name': Rename_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.772638] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "refresh_cache-9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1877.773043] env[63024]: DEBUG nova.compute.manager [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Instance network_info: |[{"id": "360ebc0c-7601-4e8c-87a5-65b79b2ae569", "address": "fa:16:3e:6b:9f:39", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap360ebc0c-76", "ovs_interfaceid": "360ebc0c-7601-4e8c-87a5-65b79b2ae569", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1877.773364] env[63024]: DEBUG oslo_concurrency.lockutils [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] Acquired lock "refresh_cache-9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1877.773555] env[63024]: DEBUG nova.network.neutron [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Refreshing network info cache for port 360ebc0c-7601-4e8c-87a5-65b79b2ae569 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1877.775441] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6b:9f:39', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '360ebc0c-7601-4e8c-87a5-65b79b2ae569', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1877.783818] env[63024]: DEBUG oslo.service.loopingcall [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 
tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1877.784596] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1877.784846] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1093416f-62f2-4011-bed8-22cfbdc6d583 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.810049] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1877.810049] env[63024]: value = "task-1951186" [ 1877.810049] env[63024]: _type = "Task" [ 1877.810049] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.818664] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951186, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.885538] env[63024]: DEBUG nova.compute.manager [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1877.885841] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1877.887122] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb76f5a5-9cc5-4e52-8432-929c53c271ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.896363] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1877.896818] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42d8a298-f68f-4da2-ac54-a5d2bbefe9c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.901869] env[63024]: DEBUG oslo_vmware.api [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951184, 'name': PowerOffVM_Task, 'duration_secs': 0.294605} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1877.902549] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1877.902735] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1877.902998] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-594d9daf-4d36-4801-b327-f85755e24e24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1877.907991] env[63024]: DEBUG oslo_vmware.api [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1877.907991] env[63024]: value = "task-1951187" [ 1877.907991] env[63024]: _type = "Task" [ 1877.907991] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1877.917021] env[63024]: DEBUG nova.compute.manager [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1877.919739] env[63024]: DEBUG oslo_vmware.api [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951187, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1877.983887] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: bd07735a-6a75-45fb-9cef-e1f2c301a489] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1878.101882] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1878.102157] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1878.102559] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleting the datastore file [datastore1] 37792b57-3347-4134-a060-53359afa3298 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1878.102987] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-039c8011-cc2e-4042-a5c3-20725e0ea709 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.111875] env[63024]: DEBUG nova.compute.manager [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1878.114385] env[63024]: DEBUG oslo_vmware.api [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1878.114385] env[63024]: value = "task-1951189" [ 1878.114385] env[63024]: _type = "Task" [ 1878.114385] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.124548] env[63024]: DEBUG oslo_vmware.api [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951189, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.140983] env[63024]: DEBUG nova.virt.hardware [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1878.141297] env[63024]: DEBUG nova.virt.hardware [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1878.141551] env[63024]: DEBUG nova.virt.hardware [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1878.141745] env[63024]: DEBUG nova.virt.hardware [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1878.141898] env[63024]: DEBUG nova.virt.hardware [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1878.142057] env[63024]: DEBUG nova.virt.hardware [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1878.142341] env[63024]: DEBUG nova.virt.hardware [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1878.142512] env[63024]: DEBUG nova.virt.hardware [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1878.142678] 
env[63024]: DEBUG nova.virt.hardware [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1878.142837] env[63024]: DEBUG nova.virt.hardware [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1878.143011] env[63024]: DEBUG nova.virt.hardware [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1878.144035] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff7ba068-5106-458e-811d-08028da3baf0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.152619] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17966654-0673-45a4-a692-a50253cb9cc1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.182415] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5238920b-3ca9-6f39-a2ea-f38f19d36e2d, 'name': SearchDatastore_Task, 'duration_secs': 0.010367} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.182415] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1878.182705] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1878.182898] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.183050] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.183233] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1878.183509] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6865b352-ba46-45bc-973b-a6a63709d1a3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.203572] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1878.205264] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1878.206455] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c98660e1-b94c-490b-b63b-f0150fdaed71 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.215573] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1878.215573] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520af55c-7e09-6d0e-e2af-643e95f2d69a" [ 1878.215573] env[63024]: _type = "Task" [ 1878.215573] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.228822] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520af55c-7e09-6d0e-e2af-643e95f2d69a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.255769] env[63024]: DEBUG nova.scheduler.client.report [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1878.270757] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951185, 'name': Rename_Task, 'duration_secs': 0.141953} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.271123] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1878.271416] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c39caafe-01de-4bf7-8969-c5913f0dad32 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.281248] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1878.281248] env[63024]: value = "task-1951190" [ 1878.281248] env[63024]: _type = "Task" [ 1878.281248] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.294727] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951190, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.321295] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951186, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.427324] env[63024]: DEBUG oslo_vmware.api [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951187, 'name': PowerOffVM_Task, 'duration_secs': 0.323814} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.431449] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1878.431790] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1878.432164] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b12652e-2b12-4464-bd02-c7f8b28770b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.447326] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.486343] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b765b8b3-a099-4e23-be30-d1178ecffc37] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1878.515561] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1878.515561] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1878.515561] 
env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Deleting the datastore file [datastore1] 7cf0ac90-d87d-4644-8a88-da5328d1721d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1878.515561] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa3ffdc1-0b91-4a6f-86aa-d3917227d2ea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.528015] env[63024]: DEBUG oslo_vmware.api [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1878.528015] env[63024]: value = "task-1951192" [ 1878.528015] env[63024]: _type = "Task" [ 1878.528015] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.536751] env[63024]: DEBUG oslo_vmware.api [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951192, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.542078] env[63024]: DEBUG nova.network.neutron [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Successfully updated port: 736ac7e2-7a3c-429f-ad94-557967750b24 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1878.625526] env[63024]: DEBUG oslo_vmware.api [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951189, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191943} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.625813] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1878.626093] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1878.626299] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1878.626468] env[63024]: INFO nova.compute.manager [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: 37792b57-3347-4134-a060-53359afa3298] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1878.626706] env[63024]: DEBUG oslo.service.loopingcall [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1878.626893] env[63024]: DEBUG nova.compute.manager [-] [instance: 37792b57-3347-4134-a060-53359afa3298] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1878.626987] env[63024]: DEBUG nova.network.neutron [-] [instance: 37792b57-3347-4134-a060-53359afa3298] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1878.663476] env[63024]: DEBUG nova.network.neutron [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Updated VIF entry in instance network info cache for port 360ebc0c-7601-4e8c-87a5-65b79b2ae569.
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1878.663947] env[63024]: DEBUG nova.network.neutron [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Updating instance_info_cache with network_info: [{"id": "360ebc0c-7601-4e8c-87a5-65b79b2ae569", "address": "fa:16:3e:6b:9f:39", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap360ebc0c-76", "ovs_interfaceid": "360ebc0c-7601-4e8c-87a5-65b79b2ae569", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1878.728565] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520af55c-7e09-6d0e-e2af-643e95f2d69a, 'name': SearchDatastore_Task, 'duration_secs': 0.012279} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.729926] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56aa050a-3383-479e-bdf8-fa7731166330 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.738473] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1878.738473] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a6c03d-f3af-7451-e996-27813f5c4640" [ 1878.738473] env[63024]: _type = "Task" [ 1878.738473] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.748389] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a6c03d-f3af-7451-e996-27813f5c4640, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.766483] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.680s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.767069] env[63024]: DEBUG nova.compute.manager [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1878.770649] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.122s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.770649] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.772105] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.431s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.773658] env[63024]: INFO nova.compute.claims [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1878.796944] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951190, 'name': PowerOnVM_Task} progress is 90%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.810147] env[63024]: INFO nova.scheduler.client.report [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Deleted allocations for instance b7f26f0e-d5a9-42a6-8af2-065659f89cf5 [ 1878.826455] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951186, 'name': CreateVM_Task, 'duration_secs': 0.576745} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1878.826946] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1878.827779] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.827862] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.828182] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1878.828593] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d819cf4-fe52-4a2a-bab2-e4c43bd77ba5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1878.837971] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1878.837971] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b5fa30-922d-7c54-5841-4aa7e48dfaa8" [ 1878.837971] env[63024]: _type = "Task" [ 1878.837971] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1878.847933] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b5fa30-922d-7c54-5841-4aa7e48dfaa8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1878.914323] env[63024]: DEBUG nova.compute.manager [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Received event network-vif-plugged-736ac7e2-7a3c-429f-ad94-557967750b24 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1878.914323] env[63024]: DEBUG oslo_concurrency.lockutils [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] Acquiring lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1878.914323] env[63024]: DEBUG oslo_concurrency.lockutils [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] Lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1878.914323] env[63024]: DEBUG oslo_concurrency.lockutils [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] Lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1878.914323] env[63024]: DEBUG nova.compute.manager [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] No waiting events found dispatching network-vif-plugged-736ac7e2-7a3c-429f-ad94-557967750b24 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1878.914323] env[63024]: WARNING nova.compute.manager [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Received unexpected event network-vif-plugged-736ac7e2-7a3c-429f-ad94-557967750b24 for instance with vm_state building and task_state spawning. [ 1878.914323] env[63024]: DEBUG nova.compute.manager [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Received event network-changed-736ac7e2-7a3c-429f-ad94-557967750b24 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1878.914323] env[63024]: DEBUG nova.compute.manager [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Refreshing instance network info cache due to event network-changed-736ac7e2-7a3c-429f-ad94-557967750b24.
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1878.914323] env[63024]: DEBUG oslo_concurrency.lockutils [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] Acquiring lock "refresh_cache-9edbda30-2e28-4961-a6ad-5ab34c40ed44" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1878.914323] env[63024]: DEBUG oslo_concurrency.lockutils [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] Acquired lock "refresh_cache-9edbda30-2e28-4961-a6ad-5ab34c40ed44" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1878.914758] env[63024]: DEBUG nova.network.neutron [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Refreshing network info cache for port 736ac7e2-7a3c-429f-ad94-557967750b24 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1878.989166] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: e2138192-14e0-43d2-9d19-9820747d7217] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1879.037315] env[63024]: DEBUG oslo_vmware.api [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951192, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185975} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.037568] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1879.037753] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1879.037926] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1879.038103] env[63024]: INFO nova.compute.manager [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1879.038344] env[63024]: DEBUG oslo.service.loopingcall [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1879.038537] env[63024]: DEBUG nova.compute.manager [-] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1879.038621] env[63024]: DEBUG nova.network.neutron [-] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1879.045011] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "refresh_cache-9edbda30-2e28-4961-a6ad-5ab34c40ed44" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.166795] env[63024]: DEBUG oslo_concurrency.lockutils [req-790399c9-fe52-4399-98b9-0abf9e7cbd9c req-4dfdb4fe-a05f-4f7c-a8aa-662a2309befd service nova] Releasing lock "refresh_cache-9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.253222] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a6c03d-f3af-7451-e996-27813f5c4640, 'name': SearchDatastore_Task, 'duration_secs': 0.019747} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.255903] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.256238] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 0f371c69-c7ae-4649-b038-be82e8ca74e1/0f371c69-c7ae-4649-b038-be82e8ca74e1.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1879.256719] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-49d3fedd-fcc0-41f9-83f0-643437bf7cc3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.266341] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1879.266341] env[63024]: value = "task-1951193" [ 1879.266341] env[63024]: _type = "Task" [ 1879.266341] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.278416] env[63024]: DEBUG nova.compute.utils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1879.279759] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951193, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.282034] env[63024]: DEBUG nova.compute.manager [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1879.282207] env[63024]: DEBUG nova.network.neutron [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1879.294351] env[63024]: DEBUG oslo_vmware.api [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951190, 'name': PowerOnVM_Task, 'duration_secs': 0.599537} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.296250] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1879.296250] env[63024]: INFO nova.compute.manager [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Took 9.23 seconds to spawn the instance on the hypervisor. 
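Editorial note on the recurring "Waiting for the task: (returnval){ ... } to complete", "progress is N%." and "completed successfully" entries above: they are emitted while the compute driver polls vCenter tasks until they reach a terminal state. The following is only a minimal sketch of that polling pattern, not the actual oslo.vmware implementation; fetch_task_info() and the TaskInfo tuple are hypothetical stand-ins for the real session/TaskInfo objects.

import time
from collections import namedtuple

def wait_for_task(fetch_task_info, interval=0.5, log=print):
    # fetch_task_info() is assumed to return an object with .key, .state
    # ('queued' | 'running' | 'success' | 'error'), .progress, .result and .error.
    while True:
        info = fetch_task_info()
        if info.state in ('queued', 'running'):
            # mirrors the "Task: {...} progress is N%." entries in the log
            log("Task %s progress is %s%%." % (info.key, info.progress or 0))
            time.sleep(interval)
            continue
        if info.state == 'success':
            log("Task %s completed successfully." % info.key)
            return info.result
        # terminal error state: surface the fault to the caller
        raise RuntimeError("Task %s failed: %s" % (info.key, info.error))

# Example: a fake task that finishes on the second poll.
TaskInfo = namedtuple('TaskInfo', 'key state progress result error')
_states = iter([TaskInfo('task-1', 'running', 40, None, None),
                TaskInfo('task-1', 'success', 100, 'done', None)])
print(wait_for_task(lambda: next(_states), interval=0))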
[ 1879.296250] env[63024]: DEBUG nova.compute.manager [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1879.296250] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74aa9d15-d939-4cc4-a6e7-316235295804 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.323879] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82b5f5-8c25-43c9-bd8c-5d6a37f2ccbd tempest-VolumesAdminNegativeTest-1443872821 tempest-VolumesAdminNegativeTest-1443872821-project-member] Lock "b7f26f0e-d5a9-42a6-8af2-065659f89cf5" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 24.369s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1879.332724] env[63024]: DEBUG nova.policy [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2cc094a0a6b444ab1880fcfb1de4e8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bbfeec6d47746328f185acd132e0d5a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1879.349725] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b5fa30-922d-7c54-5841-4aa7e48dfaa8, 'name': SearchDatastore_Task, 'duration_secs': 0.012695} completed successfully.
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.350993] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1879.351480] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1879.351931] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1879.352224] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1879.352647] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1879.353105] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14e07d8e-0ef6-45d8-a79a-888f7722c2a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.364315] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1879.364527] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1879.365234] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb8abd1d-1d06-4e84-b45c-df242cbdee37 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.372587] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1879.372587] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52020b8c-9eab-5dd1-1a3a-6f4e516b2242" [ 1879.372587] env[63024]: _type = "Task" [ 1879.372587] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.383461] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52020b8c-9eab-5dd1-1a3a-6f4e516b2242, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.416747] env[63024]: DEBUG nova.network.neutron [-] [instance: 37792b57-3347-4134-a060-53359afa3298] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.494040] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 17e1dfa2-b104-4aac-928e-6364da155c3d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1879.512176] env[63024]: DEBUG nova.network.neutron [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1879.700185] env[63024]: DEBUG nova.network.neutron [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.725342] env[63024]: DEBUG nova.network.neutron [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Successfully created port: 749aba46-5057-4a6a-8e7c-f7df42b7d129 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1879.758571] env[63024]: DEBUG nova.network.neutron [-] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1879.779871] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951193, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.787956] env[63024]: DEBUG nova.compute.manager [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1879.821347] env[63024]: INFO nova.compute.manager [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Took 43.24 seconds to build instance. [ 1879.886328] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52020b8c-9eab-5dd1-1a3a-6f4e516b2242, 'name': SearchDatastore_Task, 'duration_secs': 0.021983} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1879.887367] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f4fc163-9d38-40f4-99a1-c27af5841444 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1879.894251] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1879.894251] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52aa9566-5928-224f-a7a5-6fe3725d8f96" [ 1879.894251] env[63024]: _type = "Task" [ 1879.894251] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1879.907056] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52aa9566-5928-224f-a7a5-6fe3725d8f96, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1879.924677] env[63024]: INFO nova.compute.manager [-] [instance: 37792b57-3347-4134-a060-53359afa3298] Took 1.30 seconds to deallocate network for instance. 
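Editorial note on the oslo_concurrency.lockutils entries throughout this section, which follow a fixed pattern: "Acquiring lock ...", "Lock ... acquired ... :: waited Ns", "Lock ... released ... :: held Ns". The sketch below is a simplified stand-in built on a plain threading.Lock, only to show where the waited/held figures come from; real deployments use the lockutils helpers themselves.

import contextlib
import threading
import time

_locks = {}

@contextlib.contextmanager
def instrumented_lock(name, owner, log=print):
    lock = _locks.setdefault(name, threading.Lock())
    log('Acquiring lock "%s" by "%s"' % (name, owner))
    start = time.monotonic()
    lock.acquire()
    acquired = time.monotonic()
    log('Lock "%s" acquired by "%s" :: waited %.3fs' % (name, owner, acquired - start))
    try:
        yield
    finally:
        lock.release()
        log('Lock "%s" "released" by "%s" :: held %.3fs'
            % (name, owner, time.monotonic() - acquired))

# Example mirroring the resource-tracker usage seen above.
with instrumented_lock("compute_resources", "ResourceTracker.instance_claim"):
    pass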
[ 1879.996847] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 726d9639-1ab4-46a9-975e-5580c8344a37] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1880.202198] env[63024]: DEBUG oslo_concurrency.lockutils [req-de76727e-7315-4523-aec6-a7678d4a021e req-358df9b1-f702-4279-8b7f-7d66a78fcfce service nova] Releasing lock "refresh_cache-9edbda30-2e28-4961-a6ad-5ab34c40ed44" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.202586] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired lock "refresh_cache-9edbda30-2e28-4961-a6ad-5ab34c40ed44" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1880.202745] env[63024]: DEBUG nova.network.neutron [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1880.262293] env[63024]: INFO nova.compute.manager [-] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Took 1.22 seconds to deallocate network for instance. [ 1880.285911] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951193, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.639485} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.286206] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 0f371c69-c7ae-4649-b038-be82e8ca74e1/0f371c69-c7ae-4649-b038-be82e8ca74e1.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1880.286418] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1880.286668] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c62b1404-9c74-4958-8f29-1177b2ca1c71 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.294813] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1880.294813] env[63024]: value = "task-1951194" [ 1880.294813] env[63024]: _type = "Task" [ 1880.294813] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.306742] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951194, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.321941] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8e2949fe-c5e9-4b8c-96c6-e92269a4cbe6 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.227s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1880.344319] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b2a2bb-9195-437d-b8b4-db09bc43d030 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.353896] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7746a28-29e1-4cf5-9f49-f075aa921aff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.385348] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9fd32c-f5b0-4df5-9c86-221d395755b7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.393572] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7965396-e4f7-4a3c-b8a9-a0edeffe66ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.415912] env[63024]: DEBUG nova.compute.provider_tree [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1880.421770] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52aa9566-5928-224f-a7a5-6fe3725d8f96, 'name': SearchDatastore_Task, 'duration_secs': 0.016034} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.422249] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1880.422550] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc/9ca6342c-55bd-4c78-9fa6-3caf4ec744bc.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1880.422819] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e9ba7928-323e-4765-8422-b77fe692d910 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.433969] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1880.433969] env[63024]: value = "task-1951195" [ 1880.433969] env[63024]: _type = "Task" [ 1880.433969] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.434919] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.445064] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951195, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.501024] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 04bd30dc-9d1d-4a49-ba30-40cc4d6e06dd] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1880.769085] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1880.797234] env[63024]: DEBUG nova.compute.manager [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1880.808823] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951194, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077769} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1880.809598] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1880.811436] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7fdba7-1452-4c3d-bd82-e0136e7b7506 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.835859] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 0f371c69-c7ae-4649-b038-be82e8ca74e1/0f371c69-c7ae-4649-b038-be82e8ca74e1.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1880.838219] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-249ba0e5-17b4-4abf-915c-c5b82823a50a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.859946] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1880.859946] env[63024]: value = "task-1951196" [ 1880.859946] env[63024]: _type = "Task" [ 1880.859946] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1880.862251] env[63024]: DEBUG nova.virt.hardware [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1880.862486] env[63024]: DEBUG nova.virt.hardware [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1880.862627] env[63024]: DEBUG nova.virt.hardware [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1880.862805] env[63024]: DEBUG nova.virt.hardware [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1880.862977] env[63024]: DEBUG nova.virt.hardware [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1880.863101] env[63024]: DEBUG nova.virt.hardware [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1880.863310] env[63024]: DEBUG nova.virt.hardware [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1880.863466] env[63024]: DEBUG nova.virt.hardware [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1880.863654] env[63024]: 
DEBUG nova.virt.hardware [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1880.863823] env[63024]: DEBUG nova.virt.hardware [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1880.863997] env[63024]: DEBUG nova.virt.hardware [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1880.865021] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e23124-37c2-4648-aac9-40ab25ccff8a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.878769] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dd294b8-c014-49af-8e59-4cc7e61baaa7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1880.883302] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951196, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.923788] env[63024]: DEBUG nova.scheduler.client.report [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1880.946712] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951195, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1880.987697] env[63024]: DEBUG nova.network.neutron [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1881.004504] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 6e477ec2-9270-42b1-85bd-a315460d9cab] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1881.327207] env[63024]: DEBUG nova.network.neutron [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Updating instance_info_cache with network_info: [{"id": "736ac7e2-7a3c-429f-ad94-557967750b24", "address": "fa:16:3e:7c:1c:b0", "network": {"id": "feb2323b-f3cf-42d6-a22b-81d1c94fce9d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-75667819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f87eadd82394447910efa7b71814e97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap736ac7e2-7a", "ovs_interfaceid": "736ac7e2-7a3c-429f-ad94-557967750b24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1881.379953] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951196, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.428953] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.657s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1881.429672] env[63024]: DEBUG nova.compute.manager [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1881.435215] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.916s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1881.435478] env[63024]: DEBUG nova.objects.instance [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lazy-loading 'resources' on Instance uuid e0a37f54-14ca-4eea-a9b3-6e652ca1e48d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1881.453249] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951195, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.992113} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.453534] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc/9ca6342c-55bd-4c78-9fa6-3caf4ec744bc.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1881.453703] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1881.454159] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8f39627a-5992-4eac-a74b-a1bde040c02f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.463504] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1881.463504] env[63024]: value = "task-1951197" [ 1881.463504] env[63024]: _type = "Task" [ 1881.463504] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.474918] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951197, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.477959] env[63024]: DEBUG nova.compute.manager [req-2478b74a-257b-4248-a640-45628635e9cf req-18ecc3cc-94e8-4b87-85bc-de6a890b113a service nova] [instance: 37792b57-3347-4134-a060-53359afa3298] Received event network-vif-deleted-f4505342-e35b-4162-a5ba-ed4d32e9ea65 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1881.478287] env[63024]: DEBUG nova.compute.manager [req-2478b74a-257b-4248-a640-45628635e9cf req-18ecc3cc-94e8-4b87-85bc-de6a890b113a service nova] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Received event network-vif-deleted-23911ba0-0750-48de-9e80-03a0356b0496 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1881.478486] env[63024]: DEBUG nova.compute.manager [req-2478b74a-257b-4248-a640-45628635e9cf req-18ecc3cc-94e8-4b87-85bc-de6a890b113a service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received event network-changed-989997b7-12bd-4924-97e2-a65914c47536 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1881.478569] env[63024]: DEBUG nova.compute.manager [req-2478b74a-257b-4248-a640-45628635e9cf req-18ecc3cc-94e8-4b87-85bc-de6a890b113a service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Refreshing instance network info cache due to event network-changed-989997b7-12bd-4924-97e2-a65914c47536. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1881.478773] env[63024]: DEBUG oslo_concurrency.lockutils [req-2478b74a-257b-4248-a640-45628635e9cf req-18ecc3cc-94e8-4b87-85bc-de6a890b113a service nova] Acquiring lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1881.478954] env[63024]: DEBUG oslo_concurrency.lockutils [req-2478b74a-257b-4248-a640-45628635e9cf req-18ecc3cc-94e8-4b87-85bc-de6a890b113a service nova] Acquired lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1881.479102] env[63024]: DEBUG nova.network.neutron [req-2478b74a-257b-4248-a640-45628635e9cf req-18ecc3cc-94e8-4b87-85bc-de6a890b113a service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Refreshing network info cache for port 989997b7-12bd-4924-97e2-a65914c47536 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1881.507911] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: de31255d-b82f-4f32-82b2-0a8368fe2510] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1881.826373] env[63024]: DEBUG nova.network.neutron [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Successfully updated port: 749aba46-5057-4a6a-8e7c-f7df42b7d129 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1881.838264] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Releasing lock "refresh_cache-9edbda30-2e28-4961-a6ad-5ab34c40ed44" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1881.838264] env[63024]: DEBUG nova.compute.manager [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Instance network_info: |[{"id": "736ac7e2-7a3c-429f-ad94-557967750b24", "address": "fa:16:3e:7c:1c:b0", "network": {"id": "feb2323b-f3cf-42d6-a22b-81d1c94fce9d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-75667819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f87eadd82394447910efa7b71814e97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap736ac7e2-7a", "ovs_interfaceid": "736ac7e2-7a3c-429f-ad94-557967750b24", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1881.838264] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7c:1c:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '736ac7e2-7a3c-429f-ad94-557967750b24', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1881.852177] env[63024]: DEBUG oslo.service.loopingcall [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1881.852751] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1881.853197] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-062ee0c0-0070-4126-ba54-38af47ea435d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.885632] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951196, 'name': ReconfigVM_Task, 'duration_secs': 0.688341} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.887055] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 0f371c69-c7ae-4649-b038-be82e8ca74e1/0f371c69-c7ae-4649-b038-be82e8ca74e1.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1881.887955] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1881.887955] env[63024]: value = "task-1951198" [ 1881.887955] env[63024]: _type = "Task" [ 1881.887955] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.887955] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4804e9ce-b015-471b-aa01-b741a9aa6953 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1881.898065] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951198, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.899804] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1881.899804] env[63024]: value = "task-1951199" [ 1881.899804] env[63024]: _type = "Task" [ 1881.899804] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1881.908550] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951199, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1881.940758] env[63024]: DEBUG nova.compute.utils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1881.945910] env[63024]: DEBUG nova.compute.manager [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1881.946153] env[63024]: DEBUG nova.network.neutron [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1881.978290] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951197, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070271} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1881.978537] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1881.979398] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7cb5e6-04a6-42b0-af94-be93dbe42720 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.006833] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc/9ca6342c-55bd-4c78-9fa6-3caf4ec744bc.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1882.011027] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84267341-471e-408f-ab4e-2209c5bced52 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.028532] env[63024]: DEBUG nova.policy [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '893bfe0d8eef423aae6c7eb5cdc1a9e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18540818b60e4483963d14559bc5c38d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1882.030606] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 7146277f-2621-4e8f-a14c-49bf4dd052db] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1882.042619] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1882.042619] env[63024]: value = "task-1951200" [ 1882.042619] env[63024]: _type = "Task" [ 1882.042619] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.060013] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951200, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.323363] env[63024]: DEBUG nova.network.neutron [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Successfully created port: ece34716-8aa8-4585-95d9-ccb4eaca0ae6 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1882.330961] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "refresh_cache-c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.331038] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "refresh_cache-c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.332442] env[63024]: DEBUG nova.network.neutron [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1882.401162] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951198, 'name': CreateVM_Task, 'duration_secs': 0.461608} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.406758] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1882.408653] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.409122] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.409520] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1882.410258] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95eba0e9-85ea-4554-a1f3-d2680fbad013 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.417431] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951199, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.419065] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1882.419065] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5204705a-11a0-d8cc-0118-4a6fa771d1e7" [ 1882.419065] env[63024]: _type = "Task" [ 1882.419065] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.430602] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5204705a-11a0-d8cc-0118-4a6fa771d1e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.449953] env[63024]: DEBUG nova.compute.manager [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1882.536479] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: e03b8577-9298-4e88-98ea-6258e97db28d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1882.565723] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951200, 'name': ReconfigVM_Task, 'duration_secs': 0.302765} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.569522] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc/9ca6342c-55bd-4c78-9fa6-3caf4ec744bc.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1882.570339] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8a68b0ee-4a91-4681-8432-8d3a46b5c1af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.579678] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1882.579678] env[63024]: value = "task-1951201" [ 1882.579678] env[63024]: _type = "Task" [ 1882.579678] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.592296] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951201, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.596320] env[63024]: DEBUG nova.network.neutron [req-2478b74a-257b-4248-a640-45628635e9cf req-18ecc3cc-94e8-4b87-85bc-de6a890b113a service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Updated VIF entry in instance network info cache for port 989997b7-12bd-4924-97e2-a65914c47536. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1882.596646] env[63024]: DEBUG nova.network.neutron [req-2478b74a-257b-4248-a640-45628635e9cf req-18ecc3cc-94e8-4b87-85bc-de6a890b113a service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Updating instance_info_cache with network_info: [{"id": "989997b7-12bd-4924-97e2-a65914c47536", "address": "fa:16:3e:3b:9f:01", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989997b7-12", "ovs_interfaceid": "989997b7-12bd-4924-97e2-a65914c47536", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1882.640075] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a992381-a990-45c5-b306-715079f40e69 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.652403] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30e51024-f5eb-4206-bd8a-5c4214ce3991 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.684473] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66749c3-e694-4873-9baa-4c1493756883 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.694176] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b7b8708-1ad1-402d-927c-1ebe13574a8a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.710188] env[63024]: DEBUG nova.compute.provider_tree [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1882.899891] env[63024]: DEBUG nova.network.neutron [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1882.911760] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951199, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1882.929485] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5204705a-11a0-d8cc-0118-4a6fa771d1e7, 'name': SearchDatastore_Task, 'duration_secs': 0.018325} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1882.929763] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1882.929987] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1882.930223] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1882.930364] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1882.930537] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1882.930794] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-73316988-5c8e-42fc-8cce-baca6276af20 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.941737] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1882.941737] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1882.945167] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cbfeb8e-395f-447a-a5b4-a9df06a393ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1882.948709] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1882.948709] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dc0e4c-36e6-73b6-ba07-6330bce0dd83" [ 1882.948709] env[63024]: _type = "Task" [ 1882.948709] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1882.960669] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dc0e4c-36e6-73b6-ba07-6330bce0dd83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.044134] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 2bfcd5e1-b1d9-4829-bea5-d8c460ceec16] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1883.092607] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951201, 'name': Rename_Task, 'duration_secs': 0.219969} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.092885] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1883.093379] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f79226a7-ca18-434f-a266-807fd2a29f15 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.099021] env[63024]: DEBUG oslo_concurrency.lockutils [req-2478b74a-257b-4248-a640-45628635e9cf req-18ecc3cc-94e8-4b87-85bc-de6a890b113a service nova] Releasing lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.101420] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1883.101420] env[63024]: value = "task-1951202" [ 1883.101420] env[63024]: _type = "Task" [ 1883.101420] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.110383] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951202, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.214018] env[63024]: DEBUG nova.scheduler.client.report [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1883.244547] env[63024]: DEBUG nova.network.neutron [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Updating instance_info_cache with network_info: [{"id": "749aba46-5057-4a6a-8e7c-f7df42b7d129", "address": "fa:16:3e:8b:26:58", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", 
"mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749aba46-50", "ovs_interfaceid": "749aba46-5057-4a6a-8e7c-f7df42b7d129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1883.412702] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951199, 'name': Rename_Task, 'duration_secs': 1.085205} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.413040] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1883.413312] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79a514d2-5df5-47d0-ad14-a28d66c310b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.421450] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1883.421450] env[63024]: value = "task-1951203" [ 1883.421450] env[63024]: _type = "Task" [ 1883.421450] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.431418] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951203, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.460140] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dc0e4c-36e6-73b6-ba07-6330bce0dd83, 'name': SearchDatastore_Task, 'duration_secs': 0.07176} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.460934] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f7c5c58-acbf-4bea-b169-65a2334ee9d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.464188] env[63024]: DEBUG nova.compute.manager [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1883.471779] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1883.471779] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e5f201-eeea-8a1d-9c5d-fe08ed4d9d69" [ 1883.471779] env[63024]: _type = "Task" [ 1883.471779] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.480609] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e5f201-eeea-8a1d-9c5d-fe08ed4d9d69, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.492185] env[63024]: DEBUG nova.virt.hardware [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1883.492185] env[63024]: DEBUG nova.virt.hardware [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1883.492185] env[63024]: DEBUG nova.virt.hardware [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1883.492756] env[63024]: DEBUG nova.virt.hardware [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1883.492756] env[63024]: DEBUG nova.virt.hardware [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1883.492756] env[63024]: DEBUG nova.virt.hardware [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1883.492756] env[63024]: DEBUG nova.virt.hardware [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1883.492946] env[63024]: DEBUG nova.virt.hardware [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1883.493090] env[63024]: DEBUG nova.virt.hardware [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1883.493427] env[63024]: DEBUG nova.virt.hardware [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1883.493427] env[63024]: DEBUG nova.virt.hardware [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1883.494283] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1c60bb-c2ee-480d-838c-ebb8fec6db04 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.503120] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958adf8a-7d3b-4078-9951-a9a66728e182 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.509178] env[63024]: DEBUG nova.compute.manager [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Received event network-vif-plugged-749aba46-5057-4a6a-8e7c-f7df42b7d129 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1883.509472] env[63024]: DEBUG oslo_concurrency.lockutils [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] Acquiring lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1883.509576] env[63024]: DEBUG oslo_concurrency.lockutils [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.509737] env[63024]: DEBUG 
oslo_concurrency.lockutils [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.509895] env[63024]: DEBUG nova.compute.manager [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] No waiting events found dispatching network-vif-plugged-749aba46-5057-4a6a-8e7c-f7df42b7d129 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1883.510059] env[63024]: WARNING nova.compute.manager [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Received unexpected event network-vif-plugged-749aba46-5057-4a6a-8e7c-f7df42b7d129 for instance with vm_state building and task_state spawning. [ 1883.510210] env[63024]: DEBUG nova.compute.manager [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Received event network-changed-749aba46-5057-4a6a-8e7c-f7df42b7d129 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1883.511025] env[63024]: DEBUG nova.compute.manager [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Refreshing instance network info cache due to event network-changed-749aba46-5057-4a6a-8e7c-f7df42b7d129. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1883.511025] env[63024]: DEBUG oslo_concurrency.lockutils [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] Acquiring lock "refresh_cache-c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1883.547119] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 51532b8e-4adf-4cc7-b91e-885d7934a7e8] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1883.618210] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951202, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.719651] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.284s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1883.722674] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 14.219s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1883.747862] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "refresh_cache-c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.748741] env[63024]: DEBUG nova.compute.manager [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Instance network_info: |[{"id": "749aba46-5057-4a6a-8e7c-f7df42b7d129", "address": "fa:16:3e:8b:26:58", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749aba46-50", "ovs_interfaceid": "749aba46-5057-4a6a-8e7c-f7df42b7d129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1883.749855] env[63024]: INFO nova.scheduler.client.report [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Deleted allocations for instance e0a37f54-14ca-4eea-a9b3-6e652ca1e48d [ 1883.751083] env[63024]: DEBUG oslo_concurrency.lockutils [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] Acquired lock "refresh_cache-c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1883.751392] env[63024]: DEBUG nova.network.neutron 
[req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Refreshing network info cache for port 749aba46-5057-4a6a-8e7c-f7df42b7d129 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1883.752720] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:26:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '749aba46-5057-4a6a-8e7c-f7df42b7d129', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1883.764203] env[63024]: DEBUG oslo.service.loopingcall [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1883.770821] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1883.771621] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-41414d88-989d-45ee-a0c5-838771132a30 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.813463] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1883.813463] env[63024]: value = "task-1951204" [ 1883.813463] env[63024]: _type = "Task" [ 1883.813463] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.820256] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951204, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.932793] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951203, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1883.982113] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e5f201-eeea-8a1d-9c5d-fe08ed4d9d69, 'name': SearchDatastore_Task, 'duration_secs': 0.011972} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1883.982397] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1883.982675] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9edbda30-2e28-4961-a6ad-5ab34c40ed44/9edbda30-2e28-4961-a6ad-5ab34c40ed44.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1883.982960] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e8134d97-0e8b-479c-a720-594a02594f15 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1883.991137] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1883.991137] env[63024]: value = "task-1951205" [ 1883.991137] env[63024]: _type = "Task" [ 1883.991137] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1883.999504] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951205, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.051066] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 61fdfa06-cb40-44a3-8abc-428b26bd40f5] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1884.117265] env[63024]: DEBUG oslo_vmware.api [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951202, 'name': PowerOnVM_Task, 'duration_secs': 0.605963} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.117265] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1884.117265] env[63024]: INFO nova.compute.manager [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Took 8.64 seconds to spawn the instance on the hypervisor. 
[ 1884.117265] env[63024]: DEBUG nova.compute.manager [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1884.121779] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce0ef2de-c23c-4b15-9d4f-0f73e7fb2a34 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.122113] env[63024]: DEBUG nova.network.neutron [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Successfully updated port: ece34716-8aa8-4585-95d9-ccb4eaca0ae6 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1884.250733] env[63024]: DEBUG nova.network.neutron [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Updated VIF entry in instance network info cache for port 749aba46-5057-4a6a-8e7c-f7df42b7d129. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1884.251187] env[63024]: DEBUG nova.network.neutron [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Updating instance_info_cache with network_info: [{"id": "749aba46-5057-4a6a-8e7c-f7df42b7d129", "address": "fa:16:3e:8b:26:58", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749aba46-50", "ovs_interfaceid": "749aba46-5057-4a6a-8e7c-f7df42b7d129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1884.276653] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a8357c2-d3bd-46b3-9705-23a858a5bc94 tempest-ServersTestMultiNic-1858159583 tempest-ServersTestMultiNic-1858159583-project-member] Lock "e0a37f54-14ca-4eea-a9b3-6e652ca1e48d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.591s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1884.330118] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951204, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.443134] env[63024]: DEBUG oslo_vmware.api [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951203, 'name': PowerOnVM_Task, 'duration_secs': 1.012766} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.447275] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1884.447518] env[63024]: INFO nova.compute.manager [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Took 11.63 seconds to spawn the instance on the hypervisor. [ 1884.447695] env[63024]: DEBUG nova.compute.manager [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1884.449496] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498b5064-8971-4400-b869-ff0163cbec57 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.505454] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951205, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.554665] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 22ef5bae-f7bc-43c7-9d77-1b4547e83b24] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1884.628702] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "refresh_cache-56d220f3-b97c-4cbe-b582-c4a4f1171472" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.630446] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "refresh_cache-56d220f3-b97c-4cbe-b582-c4a4f1171472" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.630446] env[63024]: DEBUG nova.network.neutron [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1884.656397] env[63024]: INFO nova.compute.manager [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Took 34.99 seconds to build instance. [ 1884.757391] env[63024]: DEBUG oslo_concurrency.lockutils [req-002617d9-1920-4945-97be-ba2e857855ef req-4590eea6-ddb0-491e-a2c2-6a46b4bcca7e service nova] Releasing lock "refresh_cache-c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1884.828523] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951204, 'name': CreateVM_Task, 'duration_secs': 0.729044} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1884.828523] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1884.829015] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1884.829183] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1884.829501] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1884.829898] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce38ced7-303d-457c-951d-f90d4d74e237 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.833185] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a021043c-da36-4fef-a545-b16f5c9dc50b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.836705] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1884.836705] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5248e69c-668c-aa92-b3d1-7879735c65ca" [ 1884.836705] env[63024]: _type = "Task" [ 1884.836705] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1884.843057] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e677979-d4ba-415d-b03c-48b3bbda6da9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.849261] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5248e69c-668c-aa92-b3d1-7879735c65ca, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1884.880909] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fecb7bfa-ca4e-4aad-a830-1d25474676d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.891428] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b78dcd-2eb8-44d7-9586-9a797cafdbc4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1884.904204] env[63024]: DEBUG nova.compute.provider_tree [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1884.982405] env[63024]: INFO nova.compute.manager [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Took 38.73 seconds to build instance. [ 1885.009486] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951205, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.063591] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b629b4f8-f79f-4361-b78c-8705a6888a9e] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1885.159977] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94beeca9-d1ad-48a4-8cd9-cfad54b84d94 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 67.895s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.184964] env[63024]: DEBUG nova.network.neutron [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1885.348696] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5248e69c-668c-aa92-b3d1-7879735c65ca, 'name': SearchDatastore_Task, 'duration_secs': 0.013468} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.349029] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.350601] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1885.350601] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1885.350601] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1885.350601] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1885.350601] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-37ab5469-19c5-42b3-9273-1382b70afe51 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.372920] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb3584f7-6b03-4b63-b9a7-a34c0829e8eb tempest-ServersAdminTestJSON-2129746601 tempest-ServersAdminTestJSON-2129746601-project-admin] Acquiring lock "refresh_cache-9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1885.373088] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb3584f7-6b03-4b63-b9a7-a34c0829e8eb tempest-ServersAdminTestJSON-2129746601 tempest-ServersAdminTestJSON-2129746601-project-admin] Acquired lock "refresh_cache-9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1885.373251] env[63024]: DEBUG nova.network.neutron [None req-fb3584f7-6b03-4b63-b9a7-a34c0829e8eb tempest-ServersAdminTestJSON-2129746601 tempest-ServersAdminTestJSON-2129746601-project-admin] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1885.376210] env[63024]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1885.376381] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1885.377309] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f54a6c6a-dff1-45a9-b2b8-1347c1a79afc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.385112] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1885.385112] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fec007-1463-c17c-858b-55aa4bce67c6" [ 1885.385112] env[63024]: _type = "Task" [ 1885.385112] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.397150] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fec007-1463-c17c-858b-55aa4bce67c6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.404312] env[63024]: DEBUG nova.network.neutron [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Updating instance_info_cache with network_info: [{"id": "ece34716-8aa8-4585-95d9-ccb4eaca0ae6", "address": "fa:16:3e:17:50:dd", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapece34716-8a", "ovs_interfaceid": "ece34716-8aa8-4585-95d9-ccb4eaca0ae6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1885.407913] env[63024]: DEBUG nova.scheduler.client.report [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1885.484533] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9a8032e3-b13a-4dea-9d5a-0508c22aeaf9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "0f371c69-c7ae-4649-b038-be82e8ca74e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.920s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1885.506866] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951205, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.566655] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: ccd80e20-9fc2-415a-a428-fcf85994c7f8] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1885.897949] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fec007-1463-c17c-858b-55aa4bce67c6, 'name': SearchDatastore_Task, 'duration_secs': 0.020438} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.903173] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-581ad30b-2d7e-4d88-a641-bd8388140eca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.908263] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "refresh_cache-56d220f3-b97c-4cbe-b582-c4a4f1171472" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.908660] env[63024]: DEBUG nova.compute.manager [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Instance network_info: |[{"id": "ece34716-8aa8-4585-95d9-ccb4eaca0ae6", "address": "fa:16:3e:17:50:dd", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapece34716-8a", "ovs_interfaceid": "ece34716-8aa8-4585-95d9-ccb4eaca0ae6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1885.910717] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:17:50:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec763be6-4041-4651-8fd7-3820cf0ab86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ece34716-8aa8-4585-95d9-ccb4eaca0ae6', 
'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1885.923326] env[63024]: DEBUG oslo.service.loopingcall [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1885.923641] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1885.923641] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5233d6be-b7ba-d442-7768-c3b09f475b56" [ 1885.923641] env[63024]: _type = "Task" [ 1885.923641] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.927754] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1885.928085] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f33d82e0-4d06-4a1f-b6b8-89093bfab789 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.958065] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5233d6be-b7ba-d442-7768-c3b09f475b56, 'name': SearchDatastore_Task, 'duration_secs': 0.042705} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1885.959474] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1885.959741] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c28e7c21-7e7d-4cda-81e8-63538bd8a1f7/c28e7c21-7e7d-4cda-81e8-63538bd8a1f7.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1885.959990] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1885.959990] env[63024]: value = "task-1951206" [ 1885.959990] env[63024]: _type = "Task" [ 1885.959990] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.960451] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f5f6e9d-f508-4d4f-baa0-ab38c7517912 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1885.971074] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951206, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1885.972566] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1885.972566] env[63024]: value = "task-1951207" [ 1885.972566] env[63024]: _type = "Task" [ 1885.972566] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1885.983515] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951207, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.004019] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951205, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.565103} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.004314] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9edbda30-2e28-4961-a6ad-5ab34c40ed44/9edbda30-2e28-4961-a6ad-5ab34c40ed44.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1886.004529] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1886.004787] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93676129-9201-40f7-b38b-11d95f7470b3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.015856] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1886.015856] env[63024]: value = "task-1951208" [ 1886.015856] env[63024]: _type = "Task" [ 1886.015856] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.030910] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951208, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.046632] env[63024]: DEBUG nova.compute.manager [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Received event network-vif-plugged-ece34716-8aa8-4585-95d9-ccb4eaca0ae6 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1886.047265] env[63024]: DEBUG oslo_concurrency.lockutils [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] Acquiring lock "56d220f3-b97c-4cbe-b582-c4a4f1171472-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1886.047265] env[63024]: DEBUG oslo_concurrency.lockutils [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] Lock "56d220f3-b97c-4cbe-b582-c4a4f1171472-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.047585] env[63024]: DEBUG oslo_concurrency.lockutils [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] Lock "56d220f3-b97c-4cbe-b582-c4a4f1171472-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.047666] env[63024]: DEBUG nova.compute.manager [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] No waiting events found dispatching network-vif-plugged-ece34716-8aa8-4585-95d9-ccb4eaca0ae6 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1886.047784] env[63024]: WARNING nova.compute.manager [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Received unexpected event network-vif-plugged-ece34716-8aa8-4585-95d9-ccb4eaca0ae6 for instance with vm_state building and task_state spawning. [ 1886.047982] env[63024]: DEBUG nova.compute.manager [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Received event network-changed-ece34716-8aa8-4585-95d9-ccb4eaca0ae6 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1886.048217] env[63024]: DEBUG nova.compute.manager [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Refreshing instance network info cache due to event network-changed-ece34716-8aa8-4585-95d9-ccb4eaca0ae6. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1886.048424] env[63024]: DEBUG oslo_concurrency.lockutils [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] Acquiring lock "refresh_cache-56d220f3-b97c-4cbe-b582-c4a4f1171472" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1886.048559] env[63024]: DEBUG oslo_concurrency.lockutils [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] Acquired lock "refresh_cache-56d220f3-b97c-4cbe-b582-c4a4f1171472" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1886.048707] env[63024]: DEBUG nova.network.neutron [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Refreshing network info cache for port ece34716-8aa8-4585-95d9-ccb4eaca0ae6 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1886.069944] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b0b4d94c-cd5c-4452-baa6-9aeec46b43ad] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1886.460677] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.736s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1886.462805] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.495s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1886.462944] env[63024]: DEBUG nova.objects.instance [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lazy-loading 'resources' on Instance uuid 52c17abc-78f0-417b-8675-e8d62bc8baa3 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1886.482048] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951206, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.495248] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951207, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.528639] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951208, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089644} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.529061] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1886.530158] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef0888f-5072-4ded-9ac2-b82fc1e6084b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.561846] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] 9edbda30-2e28-4961-a6ad-5ab34c40ed44/9edbda30-2e28-4961-a6ad-5ab34c40ed44.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1886.564632] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f253cbb-53b3-42b7-b7b0-62a9cf47edc0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1886.582408] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: f90f35a2-f2ee-45e2-a9e4-afce50f29aa0] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 1886.585690] env[63024]: DEBUG nova.network.neutron [None req-fb3584f7-6b03-4b63-b9a7-a34c0829e8eb tempest-ServersAdminTestJSON-2129746601 tempest-ServersAdminTestJSON-2129746601-project-admin] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Updating instance_info_cache with network_info: [{"id": "360ebc0c-7601-4e8c-87a5-65b79b2ae569", "address": "fa:16:3e:6b:9f:39", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap360ebc0c-76", "ovs_interfaceid": "360ebc0c-7601-4e8c-87a5-65b79b2ae569", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1886.595030] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 
tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1886.595030] env[63024]: value = "task-1951209" [ 1886.595030] env[63024]: _type = "Task" [ 1886.595030] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1886.604800] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951209, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1886.997043] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951206, 'name': CreateVM_Task, 'duration_secs': 0.661748} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1886.998387] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1886.998722] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951207, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.615709} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.005086] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1887.005318] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1887.005650] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1887.005942] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c28e7c21-7e7d-4cda-81e8-63538bd8a1f7/c28e7c21-7e7d-4cda-81e8-63538bd8a1f7.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1887.006167] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Extending 
root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1887.007400] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44e21951-f0d8-4e55-a0d6-b1e019c01058 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.008604] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9afda7ea-4977-4379-8725-0936e40687ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.016651] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1887.016651] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524be185-36cd-a304-6e2b-645d4fbd8997" [ 1887.016651] env[63024]: _type = "Task" [ 1887.016651] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.022963] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1887.022963] env[63024]: value = "task-1951210" [ 1887.022963] env[63024]: _type = "Task" [ 1887.022963] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.031860] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524be185-36cd-a304-6e2b-645d4fbd8997, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.036473] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1887.037147] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1887.037549] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1887.037722] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1887.037907] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1887.038557] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b1daaaea-e7d5-49a0-9713-842b5c52714f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.046365] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951210, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.055955] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1887.055955] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1887.056929] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd17757d-d196-4b6a-8914-ad3108eebe1b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.063332] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1887.063332] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529b1078-303b-5308-284a-6e9251b3c5ee" [ 1887.063332] env[63024]: _type = "Task" [ 1887.063332] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.073322] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529b1078-303b-5308-284a-6e9251b3c5ee, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.074684] env[63024]: INFO nova.scheduler.client.report [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted allocation for migration 787068cf-2789-4013-8b27-8a10a4f14022 [ 1887.088767] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb3584f7-6b03-4b63-b9a7-a34c0829e8eb tempest-ServersAdminTestJSON-2129746601 tempest-ServersAdminTestJSON-2129746601-project-admin] Releasing lock "refresh_cache-9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1887.092364] env[63024]: DEBUG nova.compute.manager [None req-fb3584f7-6b03-4b63-b9a7-a34c0829e8eb tempest-ServersAdminTestJSON-2129746601 tempest-ServersAdminTestJSON-2129746601-project-admin] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Inject network info {{(pid=63024) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7549}} [ 1887.092364] env[63024]: DEBUG nova.compute.manager [None req-fb3584f7-6b03-4b63-b9a7-a34c0829e8eb tempest-ServersAdminTestJSON-2129746601 tempest-ServersAdminTestJSON-2129746601-project-admin] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] network_info to inject: |[{"id": "360ebc0c-7601-4e8c-87a5-65b79b2ae569", "address": "fa:16:3e:6b:9f:39", "network": {"id": "a1a2ebf5-0a3e-4f93-9a47-bd8cdab108ce", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-1557111633-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5577b40f56af44eebd47761192e9510f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cf5bfbae-a882-4d34-be33-b31e274b3077", "external-id": "nsx-vlan-transportzone-556", "segmentation_id": 556, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap360ebc0c-76", 
"ovs_interfaceid": "360ebc0c-7601-4e8c-87a5-65b79b2ae569", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7550}} [ 1887.095946] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb3584f7-6b03-4b63-b9a7-a34c0829e8eb tempest-ServersAdminTestJSON-2129746601 tempest-ServersAdminTestJSON-2129746601-project-admin] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Reconfiguring VM instance to set the machine id {{(pid=63024) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1887.096485] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1887.096628] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Cleaning up deleted instances with incomplete migration {{(pid=63024) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11596}} [ 1887.098293] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75cf2f98-b6f6-4cba-936a-ce7dc0823f8b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.124251] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1887.132924] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951209, 'name': ReconfigVM_Task, 'duration_secs': 0.351849} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.134465] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Reconfigured VM instance instance-0000004c to attach disk [datastore1] 9edbda30-2e28-4961-a6ad-5ab34c40ed44/9edbda30-2e28-4961-a6ad-5ab34c40ed44.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1887.135540] env[63024]: DEBUG oslo_vmware.api [None req-fb3584f7-6b03-4b63-b9a7-a34c0829e8eb tempest-ServersAdminTestJSON-2129746601 tempest-ServersAdminTestJSON-2129746601-project-admin] Waiting for the task: (returnval){ [ 1887.135540] env[63024]: value = "task-1951211" [ 1887.135540] env[63024]: _type = "Task" [ 1887.135540] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.135762] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f95a26ac-5e5f-4c0b-9b0f-f6b8d64e7490 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.148177] env[63024]: DEBUG oslo_vmware.api [None req-fb3584f7-6b03-4b63-b9a7-a34c0829e8eb tempest-ServersAdminTestJSON-2129746601 tempest-ServersAdminTestJSON-2129746601-project-admin] Task: {'id': task-1951211, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.149970] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1887.149970] env[63024]: value = "task-1951212" [ 1887.149970] env[63024]: _type = "Task" [ 1887.149970] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.166903] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951212, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.314252] env[63024]: DEBUG nova.network.neutron [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Updated VIF entry in instance network info cache for port ece34716-8aa8-4585-95d9-ccb4eaca0ae6. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1887.314605] env[63024]: DEBUG nova.network.neutron [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Updating instance_info_cache with network_info: [{"id": "ece34716-8aa8-4585-95d9-ccb4eaca0ae6", "address": "fa:16:3e:17:50:dd", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapece34716-8a", "ovs_interfaceid": "ece34716-8aa8-4585-95d9-ccb4eaca0ae6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1887.545854] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072971} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.545854] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1887.546015] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e351dbe-9e85-481f-a944-dccf1fb92f56 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.574015] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] c28e7c21-7e7d-4cda-81e8-63538bd8a1f7/c28e7c21-7e7d-4cda-81e8-63538bd8a1f7.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1887.574410] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7c5e343-e348-4860-b73f-4feb2588a3a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.598696] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a2d40d1-5d48-4100-8b49-bb24717ed551 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 21.504s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1887.608367] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529b1078-303b-5308-284a-6e9251b3c5ee, 'name': SearchDatastore_Task, 'duration_secs': 0.011873} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.610556] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1887.610556] env[63024]: value = "task-1951213" [ 1887.610556] env[63024]: _type = "Task" [ 1887.610556] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.611478] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21672b1b-4685-406b-b3b9-49002a2681c5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.624856] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1887.624856] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5235a3ff-81b6-f3a1-1e38-384257712e3d" [ 1887.624856] env[63024]: _type = "Task" [ 1887.624856] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.630473] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951213, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.641251] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5235a3ff-81b6-f3a1-1e38-384257712e3d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.654160] env[63024]: DEBUG oslo_vmware.api [None req-fb3584f7-6b03-4b63-b9a7-a34c0829e8eb tempest-ServersAdminTestJSON-2129746601 tempest-ServersAdminTestJSON-2129746601-project-admin] Task: {'id': task-1951211, 'name': ReconfigVM_Task, 'duration_secs': 0.271669} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.660583] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb3584f7-6b03-4b63-b9a7-a34c0829e8eb tempest-ServersAdminTestJSON-2129746601 tempest-ServersAdminTestJSON-2129746601-project-admin] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Reconfigured VM instance to set the machine id {{(pid=63024) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1887.667027] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951212, 'name': Rename_Task, 'duration_secs': 0.16434} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1887.667027] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1887.667175] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2769495a-b921-4f59-93ca-1b48ff3dc43e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.674700] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1887.674700] env[63024]: value = "task-1951214" [ 1887.674700] env[63024]: _type = "Task" [ 1887.674700] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1887.691035] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951214, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1887.738208] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1ad7690-298e-4532-81be-446d0c76bdba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.748931] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0fe12b-34b7-4c65-ad58-aa3e8f8b11ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.787792] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f504161f-d7b5-434b-98b4-cef39f7027ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.797029] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7dac321-cf9c-4f76-a58b-335bd9863fa8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1887.814056] env[63024]: DEBUG nova.compute.provider_tree [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1887.822928] env[63024]: DEBUG oslo_concurrency.lockutils [req-fe28ebce-a92d-4701-899b-5bf9e4b0ea97 req-c4f0c166-8296-441a-958b-b28203e6ff54 service nova] 
Releasing lock "refresh_cache-56d220f3-b97c-4cbe-b582-c4a4f1171472" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.131266] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951213, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.148680] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5235a3ff-81b6-f3a1-1e38-384257712e3d, 'name': SearchDatastore_Task, 'duration_secs': 0.019176} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.148680] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1888.148680] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 56d220f3-b97c-4cbe-b582-c4a4f1171472/56d220f3-b97c-4cbe-b582-c4a4f1171472.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1888.148680] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-605d865d-b896-4d87-af61-4e43da2e51b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.157491] env[63024]: DEBUG nova.compute.manager [req-ea5740a2-ecda-4d2d-aed9-69c6c2a5817e req-e53a1182-2cf0-4325-ae8e-be3c3781bb60 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Received event network-changed-c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1888.157683] env[63024]: DEBUG nova.compute.manager [req-ea5740a2-ecda-4d2d-aed9-69c6c2a5817e req-e53a1182-2cf0-4325-ae8e-be3c3781bb60 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Refreshing instance network info cache due to event network-changed-c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1888.157890] env[63024]: DEBUG oslo_concurrency.lockutils [req-ea5740a2-ecda-4d2d-aed9-69c6c2a5817e req-e53a1182-2cf0-4325-ae8e-be3c3781bb60 service nova] Acquiring lock "refresh_cache-0f371c69-c7ae-4649-b038-be82e8ca74e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1888.158033] env[63024]: DEBUG oslo_concurrency.lockutils [req-ea5740a2-ecda-4d2d-aed9-69c6c2a5817e req-e53a1182-2cf0-4325-ae8e-be3c3781bb60 service nova] Acquired lock "refresh_cache-0f371c69-c7ae-4649-b038-be82e8ca74e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1888.158194] env[63024]: DEBUG nova.network.neutron [req-ea5740a2-ecda-4d2d-aed9-69c6c2a5817e req-e53a1182-2cf0-4325-ae8e-be3c3781bb60 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Refreshing network info cache for port c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1888.168462] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1888.168462] env[63024]: value = "task-1951215" [ 1888.168462] env[63024]: _type = "Task" [ 1888.168462] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.178802] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951215, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.188879] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951214, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.338585] env[63024]: ERROR nova.scheduler.client.report [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [req-5b2a0724-d7e6-4150-bc4e-2ef0decd9566] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5b2a0724-d7e6-4150-bc4e-2ef0decd9566"}]} [ 1888.357067] env[63024]: DEBUG nova.scheduler.client.report [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1888.376903] env[63024]: DEBUG nova.scheduler.client.report [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1888.377146] env[63024]: DEBUG nova.compute.provider_tree [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1888.390500] env[63024]: DEBUG nova.scheduler.client.report [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1888.428317] env[63024]: DEBUG nova.scheduler.client.report [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1888.629928] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951213, 'name': ReconfigVM_Task, 'duration_secs': 0.726464} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.633662] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Reconfigured VM instance instance-0000004d to attach disk [datastore1] c28e7c21-7e7d-4cda-81e8-63538bd8a1f7/c28e7c21-7e7d-4cda-81e8-63538bd8a1f7.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1888.634750] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a4235c20-9912-4224-87a1-b96dade7bea8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.644143] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1888.644143] env[63024]: value = "task-1951216" [ 1888.644143] env[63024]: _type = "Task" [ 1888.644143] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1888.660515] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951216, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.679219] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951215, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1888.694722] env[63024]: DEBUG oslo_vmware.api [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951214, 'name': PowerOnVM_Task, 'duration_secs': 0.641597} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1888.695015] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1888.696852] env[63024]: INFO nova.compute.manager [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Took 10.58 seconds to spawn the instance on the hypervisor. 
[ 1888.697093] env[63024]: DEBUG nova.compute.manager [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1888.698051] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5e87bb8-ee70-4a0c-9fcd-1e6620290c32 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1888.977260] env[63024]: INFO nova.compute.manager [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Rebuilding instance [ 1888.992246] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd085648-f2b4-4af3-8d9c-c7f0489d93d1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.009376] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9413653a-bd2d-4c37-a84c-8ad3df84c587 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.054583] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8511856f-1fbe-4cff-8579-15b628c2a434 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.068030] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7170d69d-7005-49f6-be78-87e79e2e3fb9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.088682] env[63024]: DEBUG nova.compute.provider_tree [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1889.089429] env[63024]: DEBUG nova.compute.manager [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1889.090238] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ee96ba7-416c-4d42-b0d8-1ee45f9e96a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.124257] env[63024]: DEBUG nova.network.neutron [req-ea5740a2-ecda-4d2d-aed9-69c6c2a5817e req-e53a1182-2cf0-4325-ae8e-be3c3781bb60 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Updated VIF entry in 
instance network info cache for port c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1889.125286] env[63024]: DEBUG nova.network.neutron [req-ea5740a2-ecda-4d2d-aed9-69c6c2a5817e req-e53a1182-2cf0-4325-ae8e-be3c3781bb60 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Updating instance_info_cache with network_info: [{"id": "c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c", "address": "fa:16:3e:a7:20:a6", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc48bb2e4-b1", "ovs_interfaceid": "c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1889.167129] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951216, 'name': Rename_Task, 'duration_secs': 0.339374} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.167129] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1889.167129] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9ed60c3-ccc1-49d3-93de-c8d49f55443c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.176825] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1889.176825] env[63024]: value = "task-1951217" [ 1889.176825] env[63024]: _type = "Task" [ 1889.176825] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.181204] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951215, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597376} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.185613] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 56d220f3-b97c-4cbe-b582-c4a4f1171472/56d220f3-b97c-4cbe-b582-c4a4f1171472.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1889.185776] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1889.192329] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0ba84c7-ac96-408d-aee1-a2eba8e85dff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.203779] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1889.203779] env[63024]: value = "task-1951218" [ 1889.203779] env[63024]: _type = "Task" [ 1889.203779] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.204400] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951217, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.214131] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951218, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.226408] env[63024]: INFO nova.compute.manager [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Took 38.45 seconds to build instance. 
[ 1889.628537] env[63024]: DEBUG oslo_concurrency.lockutils [req-ea5740a2-ecda-4d2d-aed9-69c6c2a5817e req-e53a1182-2cf0-4325-ae8e-be3c3781bb60 service nova] Releasing lock "refresh_cache-0f371c69-c7ae-4649-b038-be82e8ca74e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1889.632611] env[63024]: DEBUG nova.scheduler.client.report [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 112 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1889.632974] env[63024]: DEBUG nova.compute.provider_tree [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 112 to 113 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1889.633154] env[63024]: DEBUG nova.compute.provider_tree [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1889.649851] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "1709d916-d0c4-4706-b41b-8b0ed25f3331" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.650134] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.650341] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "1709d916-d0c4-4706-b41b-8b0ed25f3331-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
{{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1889.651069] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1889.651069] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.657323] env[63024]: INFO nova.compute.manager [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Terminating instance [ 1889.691690] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951217, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1889.717793] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951218, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077393} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1889.718171] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1889.719242] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd120bf9-2ee8-4aee-9636-2c165eb01aba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.736852] env[63024]: DEBUG oslo_concurrency.lockutils [None req-67f24ef7-672c-4861-b9e8-9cecd1b27a42 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.971s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1889.744207] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 56d220f3-b97c-4cbe-b582-c4a4f1171472/56d220f3-b97c-4cbe-b582-c4a4f1171472.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1889.744207] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5c2f4246-cdcd-40b6-bb83-dfbdce05afcf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1889.765703] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1889.765703] env[63024]: value = "task-1951219" [ 1889.765703] env[63024]: _type = "Task" [ 1889.765703] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1889.776765] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951219, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.109017] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1890.109647] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-97ee649a-5d90-4145-99f1-05736884b708 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.125244] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1890.125244] env[63024]: value = "task-1951220" [ 1890.125244] env[63024]: _type = "Task" [ 1890.125244] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.137482] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951220, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.143489] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.681s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.149135] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.870s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.149135] env[63024]: DEBUG nova.objects.instance [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lazy-loading 'resources' on Instance uuid e1be531c-e849-42ac-8319-5bd453a7a562 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1890.163498] env[63024]: DEBUG nova.compute.manager [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1890.163821] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1890.166557] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2363cf3d-fa15-485d-985c-41aa86ad146c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.176731] env[63024]: INFO nova.scheduler.client.report [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Deleted allocations for instance 52c17abc-78f0-417b-8675-e8d62bc8baa3 [ 1890.189683] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1890.191311] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-aa0c4c31-6e48-45cb-b9c2-1b5b27a5a0f6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.197792] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951217, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.205558] env[63024]: DEBUG oslo_vmware.api [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1890.205558] env[63024]: value = "task-1951221" [ 1890.205558] env[63024]: _type = "Task" [ 1890.205558] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.219213] env[63024]: DEBUG oslo_vmware.api [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951221, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.230270] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.231403] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.277747] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951219, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.472552] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "6e0aa58b-85e0-4e74-812f-cc01041ed6d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.472810] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "6e0aa58b-85e0-4e74-812f-cc01041ed6d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.624328] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.627023] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.627023] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock 
"9edbda30-2e28-4961-a6ad-5ab34c40ed44-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1890.627023] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1890.627023] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.628046] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1890.633869] env[63024]: INFO nova.compute.manager [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Terminating instance [ 1890.635992] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1890.645124] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951220, 'name': PowerOffVM_Task, 'duration_secs': 0.427364} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.645422] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1890.645654] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1890.647244] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7356b9fe-3e0d-476b-be08-340bcc12baa0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.660009] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1890.660104] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58cec9a8-158f-42ab-b0f2-9e880dca2a21 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.689656] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d64135b2-184f-408a-9c8d-81eaf3539cff tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "52c17abc-78f0-417b-8675-e8d62bc8baa3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.389s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1890.697584] env[63024]: DEBUG oslo_vmware.api [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951217, 'name': PowerOnVM_Task, 'duration_secs': 1.056963} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.697812] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1890.698017] env[63024]: INFO nova.compute.manager [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Took 9.90 seconds to spawn the instance on the hypervisor. 
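The Acquiring/acquired/released lock entries interleaved above (around do_terminate_instance, _clear_events and the resource tracker) come from oslo.concurrency's lockutils wrappers, which log how long each caller waited for and held a named lock. A minimal sketch of the two usual forms, reusing names that appear in this log as the lock names (the function bodies are placeholders):

    # Illustrative sketch: the decorator and context-manager forms that emit the
    # "Acquiring lock ... by ..." / "waited ..." / "held ..." debug lines above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('9edbda30-2e28-4961-a6ad-5ab34c40ed44')
    def do_terminate_instance():
        # Runs with the named lock held; the wrapper logs waited/held times.
        pass

    def update_usage():
        with lockutils.lock('compute_resources'):
            # Serializes resource-tracker updates, as in the entries above.
            pass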
[ 1890.698215] env[63024]: DEBUG nova.compute.manager [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1890.699121] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20b49230-9a06-4543-8555-58b35a0d1b57 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.726567] env[63024]: DEBUG oslo_vmware.api [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951221, 'name': PowerOffVM_Task, 'duration_secs': 0.33371} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.726871] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1890.728358] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1890.728443] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-adcbcb3e-bcb9-4ff7-b9b1-63ceebdbafaf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.734262] env[63024]: DEBUG nova.compute.manager [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1890.750731] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1890.751088] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1890.751146] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleting the datastore file [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1890.751522] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2f290d13-5aa6-428f-8a80-35e22f3b0d1a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.761723] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1890.761723] env[63024]: value = "task-1951224" [ 1890.761723] env[63024]: _type = "Task" [ 1890.761723] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.778188] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951224, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.786350] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951219, 'name': ReconfigVM_Task, 'duration_secs': 0.754791} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1890.787438] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 56d220f3-b97c-4cbe-b582-c4a4f1171472/56d220f3-b97c-4cbe-b582-c4a4f1171472.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1890.787438] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25bd8daf-2cf4-4adb-a35b-271d0938fd1f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.795355] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1890.795355] env[63024]: value = "task-1951225" [ 1890.795355] env[63024]: _type = "Task" [ 1890.795355] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.807981] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951225, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.914169] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1890.914169] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1890.914169] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleting the datastore file [datastore1] 1709d916-d0c4-4706-b41b-8b0ed25f3331 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1890.914386] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-898f08fb-c4c6-4fd3-856d-ab8bc5da2532 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.925132] env[63024]: DEBUG oslo_vmware.api [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1890.925132] env[63024]: value = "task-1951226" [ 1890.925132] env[63024]: _type = "Task" [ 1890.925132] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.934258] env[63024]: DEBUG oslo_vmware.api [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951226, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1890.976822] env[63024]: DEBUG nova.compute.manager [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1891.142024] env[63024]: DEBUG nova.compute.manager [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1891.142024] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1891.152376] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce7320c6-b81e-4ea6-a464-ad43fe087131 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.156339] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1891.156448] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1891.166357] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1891.166641] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-967a48dc-a257-4718-8c6d-be5c2f7aa726 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.177326] env[63024]: DEBUG oslo_vmware.api [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1891.177326] env[63024]: value = "task-1951227" [ 1891.177326] env[63024]: _type = "Task" [ 1891.177326] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.188968] env[63024]: DEBUG oslo_vmware.api [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951227, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.225241] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48705213-7672-48c6-9492-daf438f8616b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.230235] env[63024]: INFO nova.compute.manager [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Took 37.70 seconds to build instance. [ 1891.235313] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f62a4e0-971b-4e8a-9baa-8278063f622e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.282376] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74e3b95-0208-4748-8694-fd548f2a1332 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.287737] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.292915] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951224, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186801} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.295287] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1891.295587] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1891.295797] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1891.302027] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fcb469-1108-4ec0-949f-2d22a4953be1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.315502] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951225, 'name': Rename_Task, 'duration_secs': 0.199052} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.324122] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1891.325077] env[63024]: DEBUG nova.compute.provider_tree [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1891.325972] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf351c63-6258-483b-a0c9-0033fcae3b7f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.333914] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1891.333914] env[63024]: value = "task-1951228" [ 1891.333914] env[63024]: _type = "Task" [ 1891.333914] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.344514] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951228, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.436952] env[63024]: DEBUG oslo_vmware.api [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951226, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.292784} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.437334] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1891.437505] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1891.437656] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1891.438341] env[63024]: INFO nova.compute.manager [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Took 1.27 seconds to destroy the instance on the hypervisor. [ 1891.438341] env[63024]: DEBUG oslo.service.loopingcall [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1891.438341] env[63024]: DEBUG nova.compute.manager [-] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1891.438791] env[63024]: DEBUG nova.network.neutron [-] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1891.511797] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1891.689205] env[63024]: DEBUG oslo_vmware.api [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951227, 'name': PowerOffVM_Task, 'duration_secs': 0.353639} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.689633] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1891.689928] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1891.690344] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-395d5e37-1136-4a7a-a28e-f8200c0121d2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.733090] env[63024]: DEBUG oslo_concurrency.lockutils [None req-04051b79-a51d-4ea6-b527-5a091dcbb7f7 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 59.375s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1891.829794] env[63024]: DEBUG nova.scheduler.client.report [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1891.846570] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951228, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.857899] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1891.857899] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1891.857899] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Deleting the datastore file [datastore1] 9edbda30-2e28-4961-a6ad-5ab34c40ed44 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1891.857899] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-96fce2fb-be46-4e79-89be-f331ca6e5ada {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.864575] env[63024]: DEBUG oslo_vmware.api [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1891.864575] env[63024]: value = "task-1951230" [ 1891.864575] env[63024]: _type = "Task" [ 1891.864575] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.875661] env[63024]: DEBUG oslo_vmware.api [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951230, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.957646] env[63024]: DEBUG nova.compute.manager [req-8784780a-9ef3-4bf2-ae60-17f2ca12e5a8 req-a2b2f991-cfb3-443c-9591-2e5401f9ef09 service nova] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Received event network-vif-deleted-611e1e79-ffb8-4ba9-8718-b57360eaa492 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1891.957992] env[63024]: INFO nova.compute.manager [req-8784780a-9ef3-4bf2-ae60-17f2ca12e5a8 req-a2b2f991-cfb3-443c-9591-2e5401f9ef09 service nova] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Neutron deleted interface 611e1e79-ffb8-4ba9-8718-b57360eaa492; detaching it from the instance and deleting it from the info cache [ 1891.958303] env[63024]: DEBUG nova.network.neutron [req-8784780a-9ef3-4bf2-ae60-17f2ca12e5a8 req-a2b2f991-cfb3-443c-9591-2e5401f9ef09 service nova] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.339584] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.193s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.341912] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.586s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1892.343438] env[63024]: DEBUG nova.objects.instance [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Lazy-loading 'resources' on Instance uuid 1448c924-7c61-4c43-a4e7-5a6dd45375cc {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1892.354579] env[63024]: DEBUG oslo_vmware.api [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951228, 'name': PowerOnVM_Task, 'duration_secs': 0.662824} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.356546] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1892.356775] env[63024]: INFO nova.compute.manager [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Took 8.89 seconds to spawn the instance on the hypervisor. 
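The inventory payloads reported to Placement above (set_inventory_for_provider and the "Inventory has not changed" checks) carry total, reserved and allocation_ratio per resource class. A small worked example using the numbers from those entries and the usual Placement capacity convention of (total - reserved) * allocation_ratio; min_unit, max_unit and step_size are omitted for brevity:

    # Numbers copied from the provider 89dfa68a-133e-436f-a9f1-86051f9fb96b
    # inventory entries above; the capacity formula is the conventional
    # Placement reading and is shown here only for orientation.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: {capacity:g} consumable units")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400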
[ 1892.356951] env[63024]: DEBUG nova.compute.manager [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1892.359801] env[63024]: DEBUG nova.virt.hardware [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1892.360061] env[63024]: DEBUG nova.virt.hardware [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1892.360233] env[63024]: DEBUG nova.virt.hardware [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1892.360441] env[63024]: DEBUG nova.virt.hardware [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1892.360591] env[63024]: DEBUG nova.virt.hardware [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1892.360740] env[63024]: DEBUG nova.virt.hardware [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1892.360955] env[63024]: DEBUG nova.virt.hardware [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1892.361154] env[63024]: DEBUG nova.virt.hardware [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 
tempest-ServersAdminTestJSON-1761531644-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1892.361386] env[63024]: DEBUG nova.virt.hardware [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1892.361605] env[63024]: DEBUG nova.virt.hardware [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1892.361769] env[63024]: DEBUG nova.virt.hardware [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1892.362623] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-125c6ea2-094c-446e-82ff-13c69d9646ec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.368040] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-180b706c-ba1d-4429-bc31-3f7fb76a388f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.370952] env[63024]: INFO nova.scheduler.client.report [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Deleted allocations for instance e1be531c-e849-42ac-8319-5bd453a7a562 [ 1892.403134] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31ffb16-ce83-40be-877c-7bf15aec4f45 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.408487] env[63024]: DEBUG oslo_vmware.api [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951230, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.429696] env[63024]: DEBUG nova.network.neutron [-] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.432575] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:14:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6c42da9-f98c-4f7e-94e7-39d45bc8f882', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1892.440896] env[63024]: DEBUG oslo.service.loopingcall [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1892.442368] env[63024]: INFO nova.compute.manager [-] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Took 1.00 seconds to deallocate network for instance. [ 1892.442368] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1892.444126] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-476c0835-3ab6-44c0-bba7-4c40dc55aa12 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.462656] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49ec64f1-6762-47bb-a96a-fe5b7bccdc84 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.474509] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac9a92ca-6292-4c6f-9bfc-aa1b063e4f2a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.487669] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1892.487669] env[63024]: value = "task-1951231" [ 1892.487669] env[63024]: _type = "Task" [ 1892.487669] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1892.495683] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951231, 'name': CreateVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1892.516081] env[63024]: DEBUG nova.compute.manager [req-8784780a-9ef3-4bf2-ae60-17f2ca12e5a8 req-a2b2f991-cfb3-443c-9591-2e5401f9ef09 service nova] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Detach interface failed, port_id=611e1e79-ffb8-4ba9-8718-b57360eaa492, reason: Instance 1709d916-d0c4-4706-b41b-8b0ed25f3331 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1892.885024] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6a50913e-4588-4852-99fc-35662bc69d8c tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "e1be531c-e849-42ac-8319-5bd453a7a562" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.262s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1892.892707] env[63024]: DEBUG oslo_vmware.api [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951230, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.543719} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1892.892998] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1892.893219] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1892.893416] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1892.893603] env[63024]: INFO nova.compute.manager [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Took 1.75 seconds to destroy the instance on the hypervisor. [ 1892.893901] env[63024]: DEBUG oslo.service.loopingcall [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1892.894127] env[63024]: DEBUG nova.compute.manager [-] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1892.894247] env[63024]: DEBUG nova.network.neutron [-] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1892.929102] env[63024]: INFO nova.compute.manager [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Took 32.62 seconds to build instance. 
[ 1892.966696] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1892.999647] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951231, 'name': CreateVM_Task, 'duration_secs': 0.507747} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.002705] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1893.003375] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.003632] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.003983] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1893.004298] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a798fef-ac60-41b8-b06f-c2ff101c90b1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.012151] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1893.012151] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5272b64a-d3c9-e389-8895-34c3bebb39c7" [ 1893.012151] env[63024]: _type = "Task" [ 1893.012151] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.023342] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5272b64a-d3c9-e389-8895-34c3bebb39c7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.393517] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad14a28-923f-43ec-a545-6f9417cf88b5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.403292] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13dadda-fffc-4079-a99e-83010be69f45 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.435895] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4bdd682b-a6ce-4bde-800c-76b1ede4e7bd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "56d220f3-b97c-4cbe-b582-c4a4f1171472" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.321s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.437066] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb4c309-6c53-4226-a6c3-a630e8bdb172 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.446539] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6636031-7044-4b0b-8afb-fa051fd9f4e2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.462737] env[63024]: DEBUG nova.compute.provider_tree [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1893.523843] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5272b64a-d3c9-e389-8895-34c3bebb39c7, 'name': SearchDatastore_Task, 'duration_secs': 0.012894} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1893.523843] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1893.524056] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1893.524280] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.524450] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.524666] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1893.524965] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa9ce77b-7d13-462e-bc4a-819b6c9ee529 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.535321] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1893.535441] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1893.536175] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e51cfb1-69d9-4240-a77c-f13429031c92 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.542749] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1893.542749] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cd8d7d-fa56-510c-de46-1e3da9f8d5b7" [ 1893.542749] env[63024]: _type = "Task" [ 1893.542749] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1893.551220] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cd8d7d-fa56-510c-de46-1e3da9f8d5b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1893.725788] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "56d220f3-b97c-4cbe-b582-c4a4f1171472" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.726069] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "56d220f3-b97c-4cbe-b582-c4a4f1171472" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.726286] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "56d220f3-b97c-4cbe-b582-c4a4f1171472-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1893.726468] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "56d220f3-b97c-4cbe-b582-c4a4f1171472-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1893.727134] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "56d220f3-b97c-4cbe-b582-c4a4f1171472-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1893.728269] env[63024]: DEBUG nova.network.neutron [-] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1893.733018] env[63024]: INFO nova.compute.manager [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Terminating instance [ 1893.984356] env[63024]: ERROR nova.scheduler.client.report [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] [req-f1a90a38-88bd-4850-8361-145c5e6b08ae] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-f1a90a38-88bd-4850-8361-145c5e6b08ae"}]} [ 1893.993257] env[63024]: DEBUG nova.compute.manager [req-c20a56ed-53ad-495a-b645-bb06a810c7c2 req-fdbaf27c-01e6-4483-ad5d-112adeb682e1 service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Received event network-changed-749aba46-5057-4a6a-8e7c-f7df42b7d129 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1893.993484] env[63024]: DEBUG nova.compute.manager [req-c20a56ed-53ad-495a-b645-bb06a810c7c2 req-fdbaf27c-01e6-4483-ad5d-112adeb682e1 service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Refreshing instance network info cache due to event network-changed-749aba46-5057-4a6a-8e7c-f7df42b7d129. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1893.993658] env[63024]: DEBUG oslo_concurrency.lockutils [req-c20a56ed-53ad-495a-b645-bb06a810c7c2 req-fdbaf27c-01e6-4483-ad5d-112adeb682e1 service nova] Acquiring lock "refresh_cache-c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1893.993800] env[63024]: DEBUG oslo_concurrency.lockutils [req-c20a56ed-53ad-495a-b645-bb06a810c7c2 req-fdbaf27c-01e6-4483-ad5d-112adeb682e1 service nova] Acquired lock "refresh_cache-c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1893.994722] env[63024]: DEBUG nova.network.neutron [req-c20a56ed-53ad-495a-b645-bb06a810c7c2 req-fdbaf27c-01e6-4483-ad5d-112adeb682e1 service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Refreshing network info cache for port 749aba46-5057-4a6a-8e7c-f7df42b7d129 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1894.003437] env[63024]: DEBUG nova.scheduler.client.report [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1894.015050] env[63024]: DEBUG nova.scheduler.client.report [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1894.015269] env[63024]: DEBUG nova.compute.provider_tree [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1894.026526] env[63024]: DEBUG nova.scheduler.client.report [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1894.043231] env[63024]: DEBUG nova.scheduler.client.report [None 
req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1894.056170] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cd8d7d-fa56-510c-de46-1e3da9f8d5b7, 'name': SearchDatastore_Task, 'duration_secs': 0.044331} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.056933] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5021874-d6f5-45ec-a5bd-8cdc699cebc5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.066624] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1894.066624] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e8ca2a-52a7-ae17-b3f6-c579fad3346f" [ 1894.066624] env[63024]: _type = "Task" [ 1894.066624] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.074066] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e8ca2a-52a7-ae17-b3f6-c579fad3346f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.231872] env[63024]: INFO nova.compute.manager [-] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Took 1.34 seconds to deallocate network for instance. [ 1894.236445] env[63024]: DEBUG nova.compute.manager [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1894.236751] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1894.239828] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2071d28a-a3be-4769-986c-4c90ea63e377 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.248053] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1894.248303] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72662ade-3fe3-4a94-83c8-babcb57c96b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.255669] env[63024]: DEBUG oslo_vmware.api [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1894.255669] env[63024]: value = "task-1951232" [ 1894.255669] env[63024]: _type = "Task" [ 1894.255669] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.264953] env[63024]: DEBUG oslo_vmware.api [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951232, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.469209] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "669c45b0-34d6-45f8-a30e-b9b96cfd71ef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.469499] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "669c45b0-34d6-45f8-a30e-b9b96cfd71ef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.484798] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7b403b-2260-4425-90d1-882623317b59 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.494013] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0f7513a-1268-40a6-a246-1702f61998b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.531612] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "96afa44e-d8c6-419c-ae69-04b7b306c2c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.531903] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "96afa44e-d8c6-419c-ae69-04b7b306c2c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1894.537895] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d438a807-342e-4d2d-88c4-86800e5756b2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.550283] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40ad3062-04f3-48a2-9673-01e69ba8c238 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.567338] env[63024]: DEBUG nova.compute.provider_tree [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1894.585595] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e8ca2a-52a7-ae17-b3f6-c579fad3346f, 'name': SearchDatastore_Task, 'duration_secs': 0.013892} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.585935] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1894.586326] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158/c1fd4146-6dd3-49e9-a744-466e6168e158.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1894.586614] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c5872c46-e465-46a3-be0b-202bd0e734fb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.595615] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1894.595615] env[63024]: value = "task-1951233" [ 1894.595615] env[63024]: _type = "Task" [ 1894.595615] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.605854] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951233, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.684063] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. 
{{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 1894.684288] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.684446] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.684593] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.684734] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.684867] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.685010] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_power_states {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1894.741683] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1894.767348] env[63024]: DEBUG oslo_vmware.api [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951232, 'name': PowerOffVM_Task, 'duration_secs': 0.337037} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.767633] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1894.767796] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1894.768078] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-234164bb-6462-46dd-91c6-d864bfca7574 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.783145] env[63024]: DEBUG nova.network.neutron [req-c20a56ed-53ad-495a-b645-bb06a810c7c2 req-fdbaf27c-01e6-4483-ad5d-112adeb682e1 service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Updated VIF entry in instance network info cache for port 749aba46-5057-4a6a-8e7c-f7df42b7d129. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1894.783524] env[63024]: DEBUG nova.network.neutron [req-c20a56ed-53ad-495a-b645-bb06a810c7c2 req-fdbaf27c-01e6-4483-ad5d-112adeb682e1 service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Updating instance_info_cache with network_info: [{"id": "749aba46-5057-4a6a-8e7c-f7df42b7d129", "address": "fa:16:3e:8b:26:58", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.170", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap749aba46-50", "ovs_interfaceid": "749aba46-5057-4a6a-8e7c-f7df42b7d129", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.867114] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1894.867114] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 
tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1894.867114] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleting the datastore file [datastore1] 56d220f3-b97c-4cbe-b582-c4a4f1171472 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1894.867830] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab5f96fa-ad05-4172-bd41-d877f1e0a3d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.876696] env[63024]: DEBUG oslo_vmware.api [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1894.876696] env[63024]: value = "task-1951235" [ 1894.876696] env[63024]: _type = "Task" [ 1894.876696] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.888174] env[63024]: DEBUG oslo_vmware.api [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951235, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.972686] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1895.043043] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1895.103381] env[63024]: DEBUG nova.scheduler.client.report [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 117 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1895.103735] env[63024]: DEBUG nova.compute.provider_tree [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 117 to 118 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1895.103965] env[63024]: DEBUG nova.compute.provider_tree [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1895.110545] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951233, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483215} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.111066] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158/c1fd4146-6dd3-49e9-a744-466e6168e158.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1895.111329] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1895.111670] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-04174862-dbd5-4ad7-9021-7eea03926e68 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.119681] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1895.119681] env[63024]: value = "task-1951236" [ 1895.119681] env[63024]: _type = "Task" [ 1895.119681] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.130225] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951236, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.192423] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Getting list of instances from cluster (obj){ [ 1895.192423] env[63024]: value = "domain-c8" [ 1895.192423] env[63024]: _type = "ClusterComputeResource" [ 1895.192423] env[63024]: } {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1895.194421] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89beb58e-f7da-4aa8-8098-4cc4ed9f2939 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.223871] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Got total of 21 instances {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1895.224163] env[63024]: WARNING nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] While synchronizing instance power states, found 28 instances in the database and 21 instances on the hypervisor. 
[ 1895.224267] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.224403] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.224552] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid e8ad74ce-7862-4574-98e7-14bc54bd5d6c {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.224705] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid df2933d1-32c3-48a6-8ceb-d5e3047d0b78 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.224870] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 37792b57-3347-4134-a060-53359afa3298 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.225075] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid c1fd4146-6dd3-49e9-a744-466e6168e158 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.225154] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 82b7019c-5049-4b8b-abb4-46f326ce3d5b {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.225293] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid b588ea21-dea0-4ee6-8f9e-12007d0a1ce1 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.225438] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 31a693b6-293a-4f01-9baf-a9e7e8d453d4 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.225580] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 601a003d-811c-4698-b0b6-054482d32c21 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.225723] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 839776ef-0562-424d-b301-2aa896f32e14 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.225865] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 9267e5e4-732d-47f1-8a30-d926a1269fb9 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.225999] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 1709d916-d0c4-4706-b41b-8b0ed25f3331 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.226161] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4 {{(pid=63024) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.226449] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid ac60546a-37b2-4d2a-8505-61fe202e2ed0 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.226449] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 7cf0ac90-d87d-4644-8a88-da5328d1721d {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.226577] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 92d1f96e-bbe7-4654-9d3a-47ba40057157 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.226715] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid fe6847e2-a742-4338-983f-698c13aaefde {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.226841] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 1448c924-7c61-4c43-a4e7-5a6dd45375cc {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.226985] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 73db94b8-cfa8-4457-bccb-d4b780edbd93 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.227134] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 77c27741-ee3a-4a8b-bbd3-89759288f7c6 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.227274] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 01b8072a-4483-4932-8294-7e5b48e6b203 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.227536] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 9a7f4452-ae50-4779-8474-11d3a6d3533f {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.227693] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 0f371c69-c7ae-4649-b038-be82e8ca74e1 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.227834] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.227977] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 9edbda30-2e28-4961-a6ad-5ab34c40ed44 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.228128] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid c28e7c21-7e7d-4cda-81e8-63538bd8a1f7 {{(pid=63024) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.228272] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Triggering sync for uuid 56d220f3-b97c-4cbe-b582-c4a4f1171472 {{(pid=63024) 
_sync_power_states /opt/stack/nova/nova/compute/manager.py:10668}} [ 1895.228692] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.228945] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.229147] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.229406] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.229595] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.229839] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.229999] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.230614] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "37792b57-3347-4134-a060-53359afa3298" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.230614] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "c1fd4146-6dd3-49e9-a744-466e6168e158" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.230614] 
env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "c1fd4146-6dd3-49e9-a744-466e6168e158" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.230776] env[63024]: INFO nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] During sync_power_state the instance has a pending task (rebuild_spawning). Skip. [ 1895.230904] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "c1fd4146-6dd3-49e9-a744-466e6168e158" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.231099] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.231270] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.231534] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.231713] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.231932] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.232120] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.232340] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "601a003d-811c-4698-b0b6-054482d32c21" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.232522] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "601a003d-811c-4698-b0b6-054482d32c21" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.232733] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "839776ef-0562-424d-b301-2aa896f32e14" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.232905] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "839776ef-0562-424d-b301-2aa896f32e14" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.233135] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "9267e5e4-732d-47f1-8a30-d926a1269fb9" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.233307] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "9267e5e4-732d-47f1-8a30-d926a1269fb9" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.233654] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "1709d916-d0c4-4706-b41b-8b0ed25f3331" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.233893] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.234092] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.234315] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.234489] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.234700] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "7cf0ac90-d87d-4644-8a88-da5328d1721d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.234897] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.235224] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.235427] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "fe6847e2-a742-4338-983f-698c13aaefde" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.235616] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "fe6847e2-a742-4338-983f-698c13aaefde" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.235841] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.236056] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.236236] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" acquired by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.236450] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.236629] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.236846] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "01b8072a-4483-4932-8294-7e5b48e6b203" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.237134] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "01b8072a-4483-4932-8294-7e5b48e6b203" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.237355] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.237533] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.237739] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "0f371c69-c7ae-4649-b038-be82e8ca74e1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.237912] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "0f371c69-c7ae-4649-b038-be82e8ca74e1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.238157] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.238332] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.238547] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.238747] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.238917] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.239142] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "56d220f3-b97c-4cbe-b582-c4a4f1171472" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.239326] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.239462] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 1895.240252] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f550fbdf-e725-4bef-a75f-a8b1e879060c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.243475] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd2ed39-27b3-4647-83f2-25131c2b68b4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.246278] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5208ec85-bb04-419a-a8a4-3462c5d50fe7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.249400] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5801da55-d839-4086-8559-9aefed3ef095 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.252073] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb4da1f2-8b8e-4953-89e0-0ff5b092058c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.255309] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9061ad63-1788-4260-aef7-951aa98ee39f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.258462] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690db0c9-6d44-45f7-b04a-2dc00ece40c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.261346] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9bb6c4-5b95-49e1-84ea-0b845d9ea155 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.264377] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fdc656e-a321-4f6a-be47-49164144f090 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.267269] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8e4dc4-e6eb-4c9d-86e9-ea2005065753 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.270282] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88bf8f20-02c9-4c25-9fde-a5133453d59a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.273215] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3bc7cf-014f-45d6-ba08-b8cb823205e8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.276313] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2adf916-b3c7-446c-93bf-abebb315c2e4 {{(pid=63024) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.279180] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95291b39-62db-4d3c-b3d7-874e507bcda3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.283261] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34045d3e-eaa8-49f4-b033-ebc160fcd54c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.286008] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023ca4cd-5a22-4df1-9879-04af4e4bd7c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.288947] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3b9568-25b8-4244-be0a-b10cb457adeb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.291946] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098a467a-03b8-4d4f-9783-5e7952b0d424 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.295102] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb2f056e-2be6-46b9-9aff-c892913aa37e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.298026] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c19f37-5a61-46a2-be65-15d8fa66f680 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.300455] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1895.316021] env[63024]: DEBUG oslo_concurrency.lockutils [req-c20a56ed-53ad-495a-b645-bb06a810c7c2 req-fdbaf27c-01e6-4483-ad5d-112adeb682e1 service nova] Releasing lock "refresh_cache-c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1895.316021] env[63024]: DEBUG nova.compute.manager [req-c20a56ed-53ad-495a-b645-bb06a810c7c2 req-fdbaf27c-01e6-4483-ad5d-112adeb682e1 service nova] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Received event network-vif-deleted-736ac7e2-7a3c-429f-ad94-557967750b24 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1895.361443] env[63024]: WARNING oslo_messaging._drivers.amqpdriver [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 1895.367045] env[63024]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1895.370061] env[63024]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. 
Connection pool size: 10: queue.Full [ 1895.371932] env[63024]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1895.373913] env[63024]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1895.382733] env[63024]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1895.385142] env[63024]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1895.387099] env[63024]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1895.388693] env[63024]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1895.390459] env[63024]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1895.391846] env[63024]: WARNING urllib3.connectionpool [-] Connection pool is full, discarding connection: vc1.osci.c.eu-de-1.cloud.sap. Connection pool size: 10: queue.Full [ 1895.397520] env[63024]: WARNING oslo_messaging._drivers.amqpdriver [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Number of call queues is 21, greater than warning threshold: 20. There could be a leak. Increasing threshold to: 40 [ 1895.421569] env[63024]: DEBUG oslo_vmware.api [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951235, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.460765} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.423792] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1895.423972] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1895.424171] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1895.424341] env[63024]: INFO nova.compute.manager [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 1895.424568] env[63024]: DEBUG oslo.service.loopingcall [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1895.426463] env[63024]: DEBUG nova.compute.manager [-] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1895.426569] env[63024]: DEBUG nova.network.neutron [-] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1895.495240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.565703] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.612382] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.270s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.614856] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.997s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1895.615138] env[63024]: DEBUG nova.objects.instance [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lazy-loading 'resources' on Instance uuid f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1895.633945] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951236, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.139502} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1895.636790] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1895.636790] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364e35ce-caca-450d-a41c-ef08f6dba065 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.640796] env[63024]: INFO nova.scheduler.client.report [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Deleted allocations for instance 1448c924-7c61-4c43-a4e7-5a6dd45375cc [ 1895.664255] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158/c1fd4146-6dd3-49e9-a744-466e6168e158.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1895.664797] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-037adf25-ef24-4848-9a45-de60656c6db6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1895.694167] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1895.694167] env[63024]: value = "task-1951237" [ 1895.694167] env[63024]: _type = "Task" [ 1895.694167] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1895.706666] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951237, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.817787] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1895.867180] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.637s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.867653] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.638s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.874296] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.645s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.884880] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.646s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.889380] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.652s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.894315] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.663s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.903916] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.667s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.905394] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "9267e5e4-732d-47f1-8a30-d926a1269fb9" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.672s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.905705] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "601a003d-811c-4698-b0b6-054482d32c21" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.673s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.927357] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.693s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.929876] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.698s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.931294] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.692s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.931819] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.697s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.933369] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "01b8072a-4483-4932-8294-7e5b48e6b203" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.696s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.933699] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.702s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.934151] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "fe6847e2-a742-4338-983f-698c13aaefde" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.698s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.934487] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "0f371c69-c7ae-4649-b038-be82e8ca74e1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.697s 
{{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.934797] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "839776ef-0562-424d-b301-2aa896f32e14" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.702s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.935109] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.699s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1895.935414] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.700s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.020830] env[63024]: DEBUG nova.compute.manager [req-8b4d8f4b-a37a-4934-bf8c-bce4360e0d7f req-c5c39026-4642-42a1-82eb-569fc7eae53a service nova] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Received event network-vif-deleted-ece34716-8aa8-4585-95d9-ccb4eaca0ae6 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1896.021017] env[63024]: INFO nova.compute.manager [req-8b4d8f4b-a37a-4934-bf8c-bce4360e0d7f req-c5c39026-4642-42a1-82eb-569fc7eae53a service nova] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Neutron deleted interface ece34716-8aa8-4585-95d9-ccb4eaca0ae6; detaching it from the instance and deleting it from the info cache [ 1896.021184] env[63024]: DEBUG nova.network.neutron [req-8b4d8f4b-a37a-4934-bf8c-bce4360e0d7f req-c5c39026-4642-42a1-82eb-569fc7eae53a service nova] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.190097] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7f1a3ebc-c409-4527-97cf-a41ee31c1b10 tempest-ServerRescueTestJSONUnderV235-1910485964 tempest-ServerRescueTestJSONUnderV235-1910485964-project-member] Lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.492s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1896.191149] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.955s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1896.191487] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c212b6a-b3c9-40cd-b0e8-388b78076461 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.203584] env[63024]: DEBUG nova.network.neutron [-] [instance: 
56d220f3-b97c-4cbe-b582-c4a4f1171472] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1896.207666] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6540a51-1372-4388-bb2c-6e362c17996a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.224414] env[63024]: INFO nova.compute.manager [-] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Took 0.80 seconds to deallocate network for instance. [ 1896.225000] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951237, 'name': ReconfigVM_Task, 'duration_secs': 0.457182} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.226999] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Reconfigured VM instance instance-0000002d to attach disk [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158/c1fd4146-6dd3-49e9-a744-466e6168e158.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1896.230239] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6bb3fefc-430f-475f-830d-cde9bb483b03 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.238709] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1896.238709] env[63024]: value = "task-1951238" [ 1896.238709] env[63024]: _type = "Task" [ 1896.238709] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1896.977333] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1896.983675] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-64a1bb97-f92a-4798-9804-241f4676c30b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.990068] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951238, 'name': Rename_Task, 'duration_secs': 0.148878} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1896.991151] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1896.991392] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8e0fab62-cef6-440c-bdd6-070472722c8b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1896.999114] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab1d7998-acb4-497d-9c6b-87928f4ae664 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.013762] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1897.013762] env[63024]: value = "task-1951239" [ 1897.013762] env[63024]: _type = "Task" [ 1897.013762] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1897.023473] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951239, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1897.044497] env[63024]: DEBUG nova.compute.manager [req-8b4d8f4b-a37a-4934-bf8c-bce4360e0d7f req-c5c39026-4642-42a1-82eb-569fc7eae53a service nova] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Detach interface failed, port_id=ece34716-8aa8-4585-95d9-ccb4eaca0ae6, reason: Instance 56d220f3-b97c-4cbe-b582-c4a4f1171472 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1897.346343] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf9c7c1-6a7b-41ea-ae1f-d7bd8db129e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.356493] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c76bac2-eda3-4355-a362-2dbe6bb0382d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.392656] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcbc226b-19d9-46e9-ba6d-54591cb2c55e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.402279] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719e8c94-dd9b-4a38-9d0e-64c2530022c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.420386] env[63024]: DEBUG nova.compute.provider_tree [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1897.490941] env[63024]: DEBUG nova.compute.manager [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1897.490941] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "1448c924-7c61-4c43-a4e7-5a6dd45375cc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 1.300s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1897.492036] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f23d3a-3d99-4824-ae8a-8c650fe786af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.526776] env[63024]: DEBUG oslo_vmware.api [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951239, 'name': PowerOnVM_Task, 'duration_secs': 0.471201} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1897.527055] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1897.527263] env[63024]: DEBUG nova.compute.manager [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1897.528761] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6af7865a-d4f4-4300-8b47-2a1278133f52 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1897.923887] env[63024]: DEBUG nova.scheduler.client.report [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1898.003618] env[63024]: INFO nova.compute.manager [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] instance snapshotting [ 1898.004237] env[63024]: DEBUG nova.objects.instance [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'flavor' on Instance uuid fe6847e2-a742-4338-983f-698c13aaefde {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1898.049890] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1898.438310] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.823s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.441525] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.999s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.443109] env[63024]: INFO nova.compute.claims [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1898.470877] env[63024]: INFO nova.scheduler.client.report [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Deleted allocations for instance f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df [ 1898.514304] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36238104-d53e-4bdc-81e9-3fab55e0d981 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.535728] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-480a994a-d352-493c-b8aa-6d210396cfd9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.811521] env[63024]: INFO nova.compute.manager [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Rebuilding instance [ 1898.858905] env[63024]: DEBUG nova.compute.manager [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1898.859875] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2473f84d-75ce-4a96-9f88-fadd5381f904 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.980570] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9858a7ec-7d80-4734-a8c9-df8a139f53ee tempest-MigrationsAdminTest-2055590921 tempest-MigrationsAdminTest-2055590921-project-member] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.944s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1898.981551] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 3.753s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1898.981844] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-51eb2c73-7427-4339-810e-6942e5d9cb3c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1898.992904] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e90bf4-d7df-477e-b5bc-eb23b248d539 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.050523] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1899.051071] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e039353f-cd46-4772-bb1d-a79eeb2fb29a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.060668] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1899.060668] env[63024]: value = "task-1951240" [ 1899.060668] env[63024]: _type = "Task" [ 1899.060668] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.070488] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951240, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.535094] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.553s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1899.581281] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951240, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.874217] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1899.874674] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ad3be059-39aa-44ab-bd51-379e20a22d7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.886037] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1899.886037] env[63024]: value = "task-1951241" [ 1899.886037] env[63024]: _type = "Task" [ 1899.886037] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1899.898988] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951241, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1899.903726] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618f3697-f80b-4095-aebd-853df61a16e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.912520] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39585550-5a1e-430c-9486-6e4df0daf256 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.946753] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8a6a5e-ea40-4f48-87e8-a20931ec6af9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.958230] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e9a8075-5630-40f9-8d44-ff9f7fdb4a4c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1899.981034] env[63024]: DEBUG nova.compute.provider_tree [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1900.077306] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951240, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.396917] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951241, 'name': PowerOffVM_Task, 'duration_secs': 0.22661} completed successfully. 
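The PowerOffVM_Task entries above follow oslo.vmware's standard invoke-then-poll pattern: the driver submits a vSphere task through the API session, then blocks in wait_for_task while the session polls the task (the "progress is 0%" / "completed successfully" lines) until it finishes. A minimal sketch of that pattern; the host and credentials are placeholders rather than values from this deployment, and the instance UUID is reused from the log only as an example:

```python
# Sketch of the oslo.vmware invoke/poll pattern visible in the log.
# Host and credentials are placeholders, not taken from this environment.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vcenter.example.org', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Locate the VM by instance UUID (SearchIndex.FindAllByUuid, as in the log).
vms = session.invoke_api(
    session.vim, 'FindAllByUuid',
    session.vim.service_content.searchIndex,
    uuid='c1fd4146-6dd3-49e9-a744-466e6168e158',
    vmSearch=True, instanceUuid=True)
vm_ref = vms[0]

# Submit the power-off task and block until polling reports completion.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)
```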
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.396988] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1900.397234] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1900.397998] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6a01c1-70ec-4d3d-be9f-ae38b3713317 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.406220] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1900.406684] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ad2635e-2068-4454-9997-8ead0ede1545 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.484578] env[63024]: DEBUG nova.scheduler.client.report [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1900.570911] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1900.571368] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1900.571693] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleting the datastore file [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1900.572164] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50010807-4d53-452c-a0b6-26fd3b48978c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.579999] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951240, 'name': CreateSnapshot_Task, 'duration_secs': 1.077425} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1900.580289] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1900.581140] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb344ca0-374a-4485-93f6-3ab3f2e76263 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1900.586134] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1900.586134] env[63024]: value = "task-1951243" [ 1900.586134] env[63024]: _type = "Task" [ 1900.586134] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1900.600774] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951243, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1900.990387] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.549s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1900.991181] env[63024]: DEBUG nova.compute.manager [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Start building networks asynchronously for instance. 
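The recurring "Acquiring lock / acquired by / released by" lines around "compute_resources" come from oslo.concurrency's lockutils, which records the wait and hold times shown above. A minimal sketch of the same pattern, with do_claim and update_usage as placeholders for the resource tracker's real work; by default these locks are process-local semaphores (passing external=True would switch to file locks):

```python
# Sketch of the oslo.concurrency locking pattern behind the
# "compute_resources" lock messages; do_claim/update_usage are placeholders.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def instance_claim(instance):
    # Runs with the semaphore held; the waited/held durations in the log
    # are measured around this critical section.
    return do_claim(instance)

# The same lock can also be taken explicitly as a context manager:
with lockutils.lock('compute_resources'):
    update_usage()
```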
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1900.994458] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.547s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1900.999989] env[63024]: INFO nova.compute.claims [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1901.104655] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1901.105026] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951243, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.422874} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1901.105581] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-88abfcee-e189-4c31-811b-aaed67215bf6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1901.108552] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1901.108746] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1901.108922] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1901.118033] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1901.118033] env[63024]: value = "task-1951244" [ 1901.118033] env[63024]: _type = "Task" [ 1901.118033] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1901.126739] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951244, 'name': CloneVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.506051] env[63024]: DEBUG nova.compute.utils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1901.510515] env[63024]: DEBUG nova.compute.manager [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1901.510515] env[63024]: DEBUG nova.network.neutron [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1901.597411] env[63024]: DEBUG nova.policy [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36ec7a083bcc41c2a0b6aedfe1aba470', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e138433d59374418952a186a4d2a0f78', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1901.629172] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951244, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1901.848199] env[63024]: DEBUG nova.network.neutron [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Successfully created port: a7a97c50-68b5-4301-99d3-7cd47c2d96d8 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1902.011018] env[63024]: DEBUG nova.compute.manager [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1902.133651] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951244, 'name': CloneVM_Task} progress is 95%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.148715] env[63024]: DEBUG nova.virt.hardware [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1902.149058] env[63024]: DEBUG nova.virt.hardware [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1902.149247] env[63024]: DEBUG nova.virt.hardware [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1902.149440] env[63024]: DEBUG nova.virt.hardware [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1902.149587] env[63024]: DEBUG nova.virt.hardware [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1902.149737] env[63024]: DEBUG nova.virt.hardware [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1902.150102] env[63024]: DEBUG nova.virt.hardware [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1902.150961] env[63024]: DEBUG 
nova.virt.hardware [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1902.151186] env[63024]: DEBUG nova.virt.hardware [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1902.151364] env[63024]: DEBUG nova.virt.hardware [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1902.151565] env[63024]: DEBUG nova.virt.hardware [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1902.152805] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7652b90-526c-4f13-9087-6a4b97628437 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.165945] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17af1e64-5603-4966-8c1b-336635ed46aa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.189219] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3f:14:c4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cf5bfbae-a882-4d34-be33-b31e274b3077', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c6c42da9-f98c-4f7e-94e7-39d45bc8f882', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1902.200599] env[63024]: DEBUG oslo.service.loopingcall [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1902.203795] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1902.205230] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2ce02e7a-e8ff-4760-b823-7b471bf834a3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.230049] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1902.230049] env[63024]: value = "task-1951245" [ 1902.230049] env[63024]: _type = "Task" [ 1902.230049] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.238847] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951245, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1902.582109] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8988e6-454e-42e2-bb1e-3b703c313576 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.592210] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4ac054-d756-4346-ab35-621bac303d29 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.634584] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65382ceb-1ba7-488c-b4ef-746dad4049f3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.643489] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951244, 'name': CloneVM_Task, 'duration_secs': 1.221005} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.646107] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Created linked-clone VM from snapshot [ 1902.647614] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00db7f6d-2630-4e38-9db1-97d0e955a575 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.650502] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35918c76-2632-46f1-912c-a9aa5049f806 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.666784] env[63024]: DEBUG nova.compute.provider_tree [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1902.670619] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Uploading image 54e4ffe9-c344-412d-954f-469fecd124bc {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1902.703597] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1902.703597] env[63024]: value = "vm-402165" [ 1902.703597] env[63024]: _type = "VirtualMachine" [ 1902.703597] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1902.703888] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e7c6370e-6ee6-4851-9bc9-df04824235da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.714464] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lease: (returnval){ [ 1902.714464] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521a43d6-48b8-b696-91c2-f0c501cfb8b2" [ 1902.714464] env[63024]: _type = "HttpNfcLease" [ 1902.714464] env[63024]: } obtained for exporting VM: (result){ [ 1902.714464] env[63024]: value = "vm-402165" [ 1902.714464] env[63024]: _type = "VirtualMachine" [ 1902.714464] env[63024]: }. 
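The CloneVM_Task / ExportVm / HttpNfcLease sequence in the surrounding entries is how the driver turns the linked-clone snapshot into a readable VMDK stream: it requests an export lease for the clone, waits for the lease to become ready, then reads the disk URL out of the lease info (the "Found VMDK URL" line further on). A condensed sketch of that sequence, assuming the same oslo.vmware session object as the earlier sketch and a vm_ref for the cloned VM:

```python
# Sketch of the export-lease sequence from the log; assumes an existing
# oslo.vmware `session` and a `vm_ref` for the cloned snapshot VM.
from oslo_vmware import vim_util

lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
session.wait_for_lease_ready(lease)

# HttpNfcLease.info carries one deviceUrl entry per exported disk.
lease_info = session.invoke_api(
    vim_util, 'get_object_property', session.vim, lease, 'info')
vmdk_url = lease_info.deviceUrl[0].url

# The driver then streams that URL (oslo_vmware.rw_handles) and periodically
# invokes HttpNfcLeaseProgress, as the later entries show, to keep the lease
# alive during the upload to Glance.
```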
{{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1902.714464] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the lease: (returnval){ [ 1902.714464] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521a43d6-48b8-b696-91c2-f0c501cfb8b2" [ 1902.714464] env[63024]: _type = "HttpNfcLease" [ 1902.714464] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1902.722799] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1902.722799] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521a43d6-48b8-b696-91c2-f0c501cfb8b2" [ 1902.722799] env[63024]: _type = "HttpNfcLease" [ 1902.722799] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1902.739291] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951245, 'name': CreateVM_Task, 'duration_secs': 0.418765} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1902.739465] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1902.740151] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1902.740300] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1902.740615] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1902.740894] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49d37e89-72c6-4eae-849c-73cc92ab4496 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1902.746268] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1902.746268] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fcb136-341f-8b5e-8b28-c4a52f201899" [ 1902.746268] env[63024]: _type = "Task" [ 1902.746268] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1902.755094] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fcb136-341f-8b5e-8b28-c4a52f201899, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.035364] env[63024]: DEBUG nova.compute.manager [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1903.067464] env[63024]: DEBUG nova.virt.hardware [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1903.069191] env[63024]: DEBUG nova.virt.hardware [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1903.069191] env[63024]: DEBUG nova.virt.hardware [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1903.069191] env[63024]: DEBUG nova.virt.hardware [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1903.069191] env[63024]: DEBUG nova.virt.hardware [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1903.069191] env[63024]: DEBUG nova.virt.hardware [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1903.069191] env[63024]: DEBUG nova.virt.hardware [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1903.069191] env[63024]: DEBUG nova.virt.hardware [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1903.070133] env[63024]: DEBUG nova.virt.hardware [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1903.070326] env[63024]: DEBUG nova.virt.hardware [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1903.070538] env[63024]: DEBUG nova.virt.hardware [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1903.071668] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e446c4e-0f2c-4247-8255-984e6d201e43 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.082625] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-917cf228-a528-4c51-8e07-d4a13c6e4de6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.202306] env[63024]: DEBUG nova.scheduler.client.report [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 118 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1903.202648] env[63024]: DEBUG nova.compute.provider_tree [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 118 to 119 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1903.202753] 
env[63024]: DEBUG nova.compute.provider_tree [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1903.229954] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1903.229954] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521a43d6-48b8-b696-91c2-f0c501cfb8b2" [ 1903.229954] env[63024]: _type = "HttpNfcLease" [ 1903.229954] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1903.230410] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1903.230410] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521a43d6-48b8-b696-91c2-f0c501cfb8b2" [ 1903.230410] env[63024]: _type = "HttpNfcLease" [ 1903.230410] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1903.231314] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1a5abe6-09cf-4d18-a6bf-b3405ece5a2e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.242879] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b61ae-e6ae-0c38-c0c1-3034a825833b/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1903.243783] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b61ae-e6ae-0c38-c0c1-3034a825833b/disk-0.vmdk for reading. {{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1903.315035] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fcb136-341f-8b5e-8b28-c4a52f201899, 'name': SearchDatastore_Task, 'duration_secs': 0.034601} completed successfully. 
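The "Updated inventory for provider ... with generation 118 ... from set_inventory_for_provider" entries above map onto placement's resource-provider inventory API: every write carries the provider generation the caller last saw, and placement bumps it on success (118 to 119 here) or returns a conflict that forces a refresh-and-retry. A rough sketch of that exchange against the placement REST API; the endpoint and token are placeholders, while the inventory values are the ones reported in the log:

```python
# Rough sketch of the placement inventory update implied by the log.
# PLACEMENT and TOKEN are placeholders; the payload mirrors the logged data.
import requests

PLACEMENT = 'http://placement.example.org/placement'
TOKEN = '...'
provider = '89dfa68a-133e-436f-a9f1-86051f9fb96b'

payload = {
    'resource_provider_generation': 118,  # generation the caller last saw
    'inventories': {
        'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
                 'step_size': 1, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                      'max_unit': 65530, 'step_size': 1,
                      'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1,
                    'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0},
    },
}

resp = requests.put(
    f'{PLACEMENT}/resource_providers/{provider}/inventories',
    json=payload,
    headers={'X-Auth-Token': TOKEN,
             'OpenStack-API-Version': 'placement 1.26'})
if resp.status_code == 409:
    # Another writer bumped the generation; re-read the provider and retry.
    pass
```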
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.315177] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1903.315481] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1903.315791] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1903.315981] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1903.316217] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1903.316531] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c2f3f5bc-f134-4986-9af6-35baa9ed755c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.327122] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1903.327318] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1903.328103] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-89ce8c32-3948-4979-afff-25aa8167fc31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.335219] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1903.335219] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527e71d6-af47-536b-8fd6-05ef69a55a23" [ 1903.335219] env[63024]: _type = "Task" [ 1903.335219] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.343577] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527e71d6-af47-536b-8fd6-05ef69a55a23, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1903.349209] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2b9b65e4-f2a5-46c3-af38-3224ff896876 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.371333] env[63024]: DEBUG nova.compute.manager [req-52245ece-0578-4269-b7c1-d0fb0239d76f req-e46b0965-3630-4957-a7b8-b703320698b6 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Received event network-vif-plugged-a7a97c50-68b5-4301-99d3-7cd47c2d96d8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1903.371524] env[63024]: DEBUG oslo_concurrency.lockutils [req-52245ece-0578-4269-b7c1-d0fb0239d76f req-e46b0965-3630-4957-a7b8-b703320698b6 service nova] Acquiring lock "43cdc362-588f-42cc-a4b2-a08fe60293a5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1903.371732] env[63024]: DEBUG oslo_concurrency.lockutils [req-52245ece-0578-4269-b7c1-d0fb0239d76f req-e46b0965-3630-4957-a7b8-b703320698b6 service nova] Lock "43cdc362-588f-42cc-a4b2-a08fe60293a5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.371937] env[63024]: DEBUG oslo_concurrency.lockutils [req-52245ece-0578-4269-b7c1-d0fb0239d76f req-e46b0965-3630-4957-a7b8-b703320698b6 service nova] Lock "43cdc362-588f-42cc-a4b2-a08fe60293a5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.372127] env[63024]: DEBUG nova.compute.manager [req-52245ece-0578-4269-b7c1-d0fb0239d76f req-e46b0965-3630-4957-a7b8-b703320698b6 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] No waiting events found dispatching network-vif-plugged-a7a97c50-68b5-4301-99d3-7cd47c2d96d8 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 
1903.372290] env[63024]: WARNING nova.compute.manager [req-52245ece-0578-4269-b7c1-d0fb0239d76f req-e46b0965-3630-4957-a7b8-b703320698b6 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Received unexpected event network-vif-plugged-a7a97c50-68b5-4301-99d3-7cd47c2d96d8 for instance with vm_state building and task_state spawning. [ 1903.514225] env[63024]: DEBUG nova.network.neutron [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Successfully updated port: a7a97c50-68b5-4301-99d3-7cd47c2d96d8 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1903.707803] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.713s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1903.708747] env[63024]: DEBUG nova.compute.manager [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1903.712377] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.277s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1903.712844] env[63024]: DEBUG nova.objects.instance [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lazy-loading 'resources' on Instance uuid 37792b57-3347-4134-a060-53359afa3298 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1903.847189] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527e71d6-af47-536b-8fd6-05ef69a55a23, 'name': SearchDatastore_Task, 'duration_secs': 0.010512} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1903.848135] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72ecbbd1-1618-4353-a31f-6d9825833edc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1903.855598] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1903.855598] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5269af70-ed33-bdbc-28f7-2fb0fa3275f0" [ 1903.855598] env[63024]: _type = "Task" [ 1903.855598] env[63024]: } to complete. 
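The network-vif-plugged warning above ("No waiting events found dispatching ... Received unexpected event") reflects a register-before-fire handshake: the compute manager normally records which port events it is waiting for before asking Neutron to plug the VIF, and an event that arrives with no registered waiter is logged and dropped. Nova's own event plumbing is not visible in this log, so the sketch below only illustrates the general shape of that handshake with a plain threading.Event; it is not Nova's implementation:

```python
# Generic illustration of the "register interest before the event can fire"
# handshake behind the network-vif-plugged messages; not Nova's real code.
import threading

_events = {}
_lock = threading.Lock()

def expect_event(name):
    # Register the waiter first, so a callback arriving later can find it.
    with _lock:
        return _events.setdefault(name, threading.Event())

def deliver_event(name):
    with _lock:
        ev = _events.get(name)
    if ev is None:
        print(f'Received unexpected event {name}')  # analogue of the WARNING
        return
    ev.set()

waiter = expect_event('network-vif-plugged-a7a97c50')
# ... request the port binding from Neutron here ...
if not waiter.wait(timeout=300):
    raise RuntimeError('timed out waiting for vif plugging')
```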
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1903.864281] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5269af70-ed33-bdbc-28f7-2fb0fa3275f0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.015716] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1904.015955] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquired lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1904.016247] env[63024]: DEBUG nova.network.neutron [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1904.160208] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1904.160208] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1904.214738] env[63024]: DEBUG nova.compute.utils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1904.220159] env[63024]: DEBUG nova.compute.manager [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1904.220159] env[63024]: DEBUG nova.network.neutron [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1904.290239] env[63024]: DEBUG nova.policy [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '54b7a5c8406e44e3a00cf903bc74e48d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '99c4328f2c8c4139b4eace4b465e37e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1904.374284] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5269af70-ed33-bdbc-28f7-2fb0fa3275f0, 'name': SearchDatastore_Task, 'duration_secs': 0.013587} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1904.374684] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1904.375012] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158/c1fd4146-6dd3-49e9-a744-466e6168e158.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1904.375306] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d53f8aab-3b90-4465-bf00-b8c082fc7928 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.389217] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1904.389217] env[63024]: value = "task-1951247" [ 1904.389217] env[63024]: _type = "Task" [ 1904.389217] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1904.400858] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951247, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.557736] env[63024]: DEBUG nova.network.neutron [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1904.663767] env[63024]: DEBUG nova.compute.utils [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1904.668633] env[63024]: DEBUG nova.network.neutron [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Successfully created port: 6e038615-4146-41f3-9011-c4aaf6ffe845 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1904.724893] env[63024]: DEBUG nova.compute.manager [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1904.855134] env[63024]: DEBUG nova.network.neutron [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Updating instance_info_cache with network_info: [{"id": "a7a97c50-68b5-4301-99d3-7cd47c2d96d8", "address": "fa:16:3e:5e:f9:6e", "network": {"id": "384d05e3-ef53-40f3-8a75-21f850df070c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-411167579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e138433d59374418952a186a4d2a0f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7a97c50-68", "ovs_interfaceid": "a7a97c50-68b5-4301-99d3-7cd47c2d96d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1904.884644] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3226e87d-f5c4-43e3-9587-9bcfdb7c333b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.898866] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a3ef99-3292-4910-8559-ac154fe7725d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.906528] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951247, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1904.935714] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816a3a17-aa7c-49b4-b0fa-7d5b322e3585 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.944809] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766a377f-bd70-4319-8e5c-8d61fccfcc72 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1904.960085] env[63024]: DEBUG nova.compute.provider_tree [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1905.172911] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1905.364020] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Releasing lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.364020] env[63024]: DEBUG nova.compute.manager [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Instance network_info: |[{"id": "a7a97c50-68b5-4301-99d3-7cd47c2d96d8", "address": "fa:16:3e:5e:f9:6e", "network": {"id": "384d05e3-ef53-40f3-8a75-21f850df070c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-411167579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e138433d59374418952a186a4d2a0f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7a97c50-68", "ovs_interfaceid": "a7a97c50-68b5-4301-99d3-7cd47c2d96d8", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1905.364020] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:f9:6e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0a76279-3c11-4bef-b124-2a2ee13fa377', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a7a97c50-68b5-4301-99d3-7cd47c2d96d8', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1905.371701] env[63024]: DEBUG oslo.service.loopingcall [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1905.372133] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1905.372474] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f8fb964-6f4b-4165-93ce-c162cf1979b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.406349] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951247, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.52445} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.407978] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158/c1fd4146-6dd3-49e9-a744-466e6168e158.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1905.408422] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1905.408640] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1905.408640] env[63024]: value = "task-1951248" [ 1905.408640] env[63024]: _type = "Task" [ 1905.408640] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.408891] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4bce3429-ebd8-4741-937b-7777a2cbcf25 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.427279] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951248, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.429417] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1905.429417] env[63024]: value = "task-1951249" [ 1905.429417] env[63024]: _type = "Task" [ 1905.429417] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.440060] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951249, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1905.487804] env[63024]: ERROR nova.scheduler.client.report [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [req-910b5df7-d5aa-4a2b-a604-7415557fa9fe] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-910b5df7-d5aa-4a2b-a604-7415557fa9fe"}]} [ 1905.506319] env[63024]: DEBUG nova.scheduler.client.report [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1905.519894] env[63024]: DEBUG nova.scheduler.client.report [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1905.520143] env[63024]: DEBUG nova.compute.provider_tree [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1905.534528] env[63024]: DEBUG nova.scheduler.client.report [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1905.556119] env[63024]: DEBUG nova.scheduler.client.report [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1905.725473] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "ea24d375-ba88-42ca-a07e-52000ec613c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1905.725736] env[63024]: DEBUG 
oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1905.742610] env[63024]: DEBUG nova.compute.manager [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1905.778586] env[63024]: DEBUG nova.virt.hardware [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1905.779085] env[63024]: DEBUG nova.virt.hardware [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1905.779085] env[63024]: DEBUG nova.virt.hardware [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1905.779200] env[63024]: DEBUG nova.virt.hardware [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1905.779340] env[63024]: DEBUG nova.virt.hardware [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1905.779489] env[63024]: DEBUG nova.virt.hardware [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1905.779727] env[63024]: DEBUG nova.virt.hardware [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 
tempest-ImagesTestJSON-1301675684-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1905.779885] env[63024]: DEBUG nova.virt.hardware [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1905.780058] env[63024]: DEBUG nova.virt.hardware [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1905.780222] env[63024]: DEBUG nova.virt.hardware [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1905.780392] env[63024]: DEBUG nova.virt.hardware [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1905.781829] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78e6401e-77f6-46a1-a827-90dcea284842 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.791123] env[63024]: DEBUG nova.compute.manager [req-d20ae5e3-e46e-48fa-b13d-b0de61ac1e83 req-49391040-3c1b-4352-8b13-4ab2897d40f5 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Received event network-changed-a7a97c50-68b5-4301-99d3-7cd47c2d96d8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1905.791301] env[63024]: DEBUG nova.compute.manager [req-d20ae5e3-e46e-48fa-b13d-b0de61ac1e83 req-49391040-3c1b-4352-8b13-4ab2897d40f5 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Refreshing instance network info cache due to event network-changed-a7a97c50-68b5-4301-99d3-7cd47c2d96d8. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1905.791572] env[63024]: DEBUG oslo_concurrency.lockutils [req-d20ae5e3-e46e-48fa-b13d-b0de61ac1e83 req-49391040-3c1b-4352-8b13-4ab2897d40f5 service nova] Acquiring lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.791690] env[63024]: DEBUG oslo_concurrency.lockutils [req-d20ae5e3-e46e-48fa-b13d-b0de61ac1e83 req-49391040-3c1b-4352-8b13-4ab2897d40f5 service nova] Acquired lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.791937] env[63024]: DEBUG nova.network.neutron [req-d20ae5e3-e46e-48fa-b13d-b0de61ac1e83 req-49391040-3c1b-4352-8b13-4ab2897d40f5 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Refreshing network info cache for port a7a97c50-68b5-4301-99d3-7cd47c2d96d8 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1905.807275] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7f38c4-531a-4c65-845c-0d8f83736e75 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.922965] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951248, 'name': CreateVM_Task, 'duration_secs': 0.476389} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.925666] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1905.926628] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.926765] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.927097] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1905.927359] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b759bc5-91e6-4ba9-96f8-2a1764cd49bd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.934781] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for 
the task: (returnval){ [ 1905.934781] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ba96da-fd75-9e91-e343-3dc016406f03" [ 1905.934781] env[63024]: _type = "Task" [ 1905.934781] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1905.942879] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951249, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084314} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.944194] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1905.947863] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5270a7-e545-45b3-8867-a19ccbef3e9c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.950646] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ba96da-fd75-9e91-e343-3dc016406f03, 'name': SearchDatastore_Task, 'duration_secs': 0.011227} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1905.951263] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1905.951493] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1905.951792] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1905.951920] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1905.952114] env[63024]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1905.952366] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6f071df5-2b9c-411c-af02-0af0ba502a71 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.975672] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158/c1fd4146-6dd3-49e9-a744-466e6168e158.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1905.976687] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6231eee-38fc-4fb0-b730-37e88eb8e845 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1905.994931] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1905.995129] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1905.996470] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c955df7b-1d85-4a3a-9446-87947ccee20e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.003420] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1906.003420] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524a8f2b-63c5-b8f1-899b-2967e9651e48" [ 1906.003420] env[63024]: _type = "Task" [ 1906.003420] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.005574] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1906.005574] env[63024]: value = "task-1951250" [ 1906.005574] env[63024]: _type = "Task" [ 1906.005574] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.022867] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951250, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.023389] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524a8f2b-63c5-b8f1-899b-2967e9651e48, 'name': SearchDatastore_Task, 'duration_secs': 0.011793} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.024079] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b922a79-1080-43a5-807f-feda77ee3ab7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.030854] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1906.030854] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524687c1-0551-ea1e-92bd-2e42e4a34969" [ 1906.030854] env[63024]: _type = "Task" [ 1906.030854] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.043058] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524687c1-0551-ea1e-92bd-2e42e4a34969, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.129039] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805b38ba-0de2-4c98-8222-b5abb41b9001 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.137194] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbffbdd1-eff9-44b3-9177-a21317dff51e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.168953] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2485638-10f9-4681-a4c8-94d25a3c5478 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.177566] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d76cc7-c746-44c5-83df-538774edd42f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.192444] env[63024]: DEBUG nova.compute.provider_tree [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1906.227911] env[63024]: DEBUG nova.compute.manager [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1906.268131] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.269165] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1906.269165] env[63024]: INFO nova.compute.manager [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Attaching volume ded8b478-8973-478c-b264-5807871774d2 to /dev/sdb [ 1906.293984] env[63024]: DEBUG nova.network.neutron [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Successfully updated port: 6e038615-4146-41f3-9011-c4aaf6ffe845 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1906.315844] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b247d6c3-d373-404f-922b-ef52dde5f1df {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.326037] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94d4b88e-4a82-4471-8774-558a8cde4c14 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.349468] env[63024]: DEBUG nova.virt.block_device [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Updating existing volume attachment record: e34501f6-1fe7-4695-bfaf-32a1829ec5f3 {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1906.518843] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951250, 'name': ReconfigVM_Task, 'duration_secs': 0.402006} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.519242] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Reconfigured VM instance instance-0000002d to attach disk [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158/c1fd4146-6dd3-49e9-a744-466e6168e158.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1906.519908] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ce55aeb-59de-452d-9a69-4b7532d2fb47 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.527502] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1906.527502] env[63024]: value = "task-1951252" [ 1906.527502] env[63024]: _type = "Task" [ 1906.527502] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.531999] env[63024]: DEBUG nova.network.neutron [req-d20ae5e3-e46e-48fa-b13d-b0de61ac1e83 req-49391040-3c1b-4352-8b13-4ab2897d40f5 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Updated VIF entry in instance network info cache for port a7a97c50-68b5-4301-99d3-7cd47c2d96d8. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1906.532430] env[63024]: DEBUG nova.network.neutron [req-d20ae5e3-e46e-48fa-b13d-b0de61ac1e83 req-49391040-3c1b-4352-8b13-4ab2897d40f5 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Updating instance_info_cache with network_info: [{"id": "a7a97c50-68b5-4301-99d3-7cd47c2d96d8", "address": "fa:16:3e:5e:f9:6e", "network": {"id": "384d05e3-ef53-40f3-8a75-21f850df070c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-411167579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e138433d59374418952a186a4d2a0f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7a97c50-68", "ovs_interfaceid": "a7a97c50-68b5-4301-99d3-7cd47c2d96d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1906.543434] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951252, 'name': Rename_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.547991] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524687c1-0551-ea1e-92bd-2e42e4a34969, 'name': SearchDatastore_Task, 'duration_secs': 0.012594} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1906.548306] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1906.548551] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 43cdc362-588f-42cc-a4b2-a08fe60293a5/43cdc362-588f-42cc-a4b2-a08fe60293a5.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1906.548851] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86599697-5821-46c1-8b4c-f8bed6305d7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1906.558051] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1906.558051] env[63024]: value = "task-1951253" [ 1906.558051] env[63024]: _type = "Task" [ 1906.558051] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1906.568046] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951253, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1906.727491] env[63024]: DEBUG nova.scheduler.client.report [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1906.727796] env[63024]: DEBUG nova.compute.provider_tree [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 120 to 121 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1906.728060] env[63024]: DEBUG nova.compute.provider_tree [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1906.749799] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1906.794887] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "refresh_cache-a0a9ea07-dda8-45b4-bab9-cdaf683c0a21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1906.795998] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "refresh_cache-a0a9ea07-dda8-45b4-bab9-cdaf683c0a21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1906.795998] env[63024]: DEBUG nova.network.neutron [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1907.035467] env[63024]: DEBUG 
oslo_concurrency.lockutils [req-d20ae5e3-e46e-48fa-b13d-b0de61ac1e83 req-49391040-3c1b-4352-8b13-4ab2897d40f5 service nova] Releasing lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.040478] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951252, 'name': Rename_Task, 'duration_secs': 0.159733} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.040677] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1907.040860] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bcbe157e-0199-48bc-8ac3-e3791cb9be37 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.051879] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1907.051879] env[63024]: value = "task-1951256" [ 1907.051879] env[63024]: _type = "Task" [ 1907.051879] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.069452] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951256, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.072675] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951253, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.233983] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.521s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.236512] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.467s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.236770] env[63024]: DEBUG nova.objects.instance [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lazy-loading 'resources' on Instance uuid 7cf0ac90-d87d-4644-8a88-da5328d1721d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1907.253916] env[63024]: INFO nova.scheduler.client.report [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleted allocations for instance 37792b57-3347-4134-a060-53359afa3298 [ 1907.331179] env[63024]: DEBUG nova.network.neutron [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1907.493864] env[63024]: DEBUG nova.network.neutron [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Updating instance_info_cache with network_info: [{"id": "6e038615-4146-41f3-9011-c4aaf6ffe845", "address": "fa:16:3e:a8:d0:4f", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e038615-41", "ovs_interfaceid": "6e038615-4146-41f3-9011-c4aaf6ffe845", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1907.566942] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951256, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.574422] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951253, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57647} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1907.574734] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 43cdc362-588f-42cc-a4b2-a08fe60293a5/43cdc362-588f-42cc-a4b2-a08fe60293a5.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1907.574962] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1907.575242] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3bf0034-744b-4f13-b841-654e37aa179c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.587389] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1907.587389] env[63024]: value = "task-1951257" [ 1907.587389] env[63024]: _type = "Task" [ 1907.587389] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1907.601330] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951257, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1907.761880] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c86cf25-d8d6-4123-928f-07d445feb5a9 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "37792b57-3347-4134-a060-53359afa3298" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.914s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.762972] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "37792b57-3347-4134-a060-53359afa3298" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.533s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.766044] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9e53750-d6c7-4764-9ff2-d7cd0e207fe1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.777211] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f55a1d8-824d-4abe-a5aa-40d261ce3a9e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1907.832945] env[63024]: DEBUG nova.compute.manager [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Received event network-vif-plugged-6e038615-4146-41f3-9011-c4aaf6ffe845 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1907.833265] env[63024]: DEBUG oslo_concurrency.lockutils [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] Acquiring lock "a0a9ea07-dda8-45b4-bab9-cdaf683c0a21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1907.833378] env[63024]: DEBUG oslo_concurrency.lockutils [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] Lock "a0a9ea07-dda8-45b4-bab9-cdaf683c0a21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1907.833544] env[63024]: DEBUG oslo_concurrency.lockutils [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] Lock "a0a9ea07-dda8-45b4-bab9-cdaf683c0a21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1907.833714] env[63024]: DEBUG nova.compute.manager [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] No waiting events found dispatching network-vif-plugged-6e038615-4146-41f3-9011-c4aaf6ffe845 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1907.834034] env[63024]: WARNING nova.compute.manager [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 
req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Received unexpected event network-vif-plugged-6e038615-4146-41f3-9011-c4aaf6ffe845 for instance with vm_state building and task_state spawning. [ 1907.834120] env[63024]: DEBUG nova.compute.manager [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Received event network-changed-6e038615-4146-41f3-9011-c4aaf6ffe845 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1907.834271] env[63024]: DEBUG nova.compute.manager [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Refreshing instance network info cache due to event network-changed-6e038615-4146-41f3-9011-c4aaf6ffe845. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1907.834405] env[63024]: DEBUG oslo_concurrency.lockutils [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] Acquiring lock "refresh_cache-a0a9ea07-dda8-45b4-bab9-cdaf683c0a21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1907.996859] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "refresh_cache-a0a9ea07-dda8-45b4-bab9-cdaf683c0a21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1907.997189] env[63024]: DEBUG nova.compute.manager [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Instance network_info: |[{"id": "6e038615-4146-41f3-9011-c4aaf6ffe845", "address": "fa:16:3e:a8:d0:4f", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e038615-41", "ovs_interfaceid": "6e038615-4146-41f3-9011-c4aaf6ffe845", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1907.997497] env[63024]: DEBUG oslo_concurrency.lockutils [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] Acquired lock "refresh_cache-a0a9ea07-dda8-45b4-bab9-cdaf683c0a21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1907.997677] 
env[63024]: DEBUG nova.network.neutron [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Refreshing network info cache for port 6e038615-4146-41f3-9011-c4aaf6ffe845 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1907.998930] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:d0:4f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6e038615-4146-41f3-9011-c4aaf6ffe845', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1908.006861] env[63024]: DEBUG oslo.service.loopingcall [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1908.012082] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1908.012768] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8cc88766-b85f-4923-bc3e-90de3c2dc204 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.037766] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1908.037766] env[63024]: value = "task-1951258" [ 1908.037766] env[63024]: _type = "Task" [ 1908.037766] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.051983] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951258, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.066105] env[63024]: DEBUG oslo_vmware.api [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951256, 'name': PowerOnVM_Task, 'duration_secs': 0.586618} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.066389] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1908.066586] env[63024]: DEBUG nova.compute.manager [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1908.067392] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5a678d-c7d2-45ed-a720-aa6dee832899 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.102174] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951257, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084798} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.102479] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1908.103366] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc518229-8c6b-4f02-bf2c-9ad145182280 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.130980] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Reconfiguring VM instance instance-0000004f to attach disk [datastore1] 43cdc362-588f-42cc-a4b2-a08fe60293a5/43cdc362-588f-42cc-a4b2-a08fe60293a5.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1908.136457] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af32fb83-a822-4517-b6fe-fbe1734252de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.159444] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1908.159444] env[63024]: value = "task-1951259" [ 1908.159444] env[63024]: _type = "Task" [ 1908.159444] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.171561] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951259, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.255214] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0d20d9-d2c7-47d5-8f6c-fc42d24b2d10 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.263386] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2fd026f-e2d8-478f-b6a6-c20eff57bfde {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.297194] env[63024]: DEBUG nova.network.neutron [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Updated VIF entry in instance network info cache for port 6e038615-4146-41f3-9011-c4aaf6ffe845. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1908.297581] env[63024]: DEBUG nova.network.neutron [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Updating instance_info_cache with network_info: [{"id": "6e038615-4146-41f3-9011-c4aaf6ffe845", "address": "fa:16:3e:a8:d0:4f", "network": {"id": "0719de66-1f31-4596-a9a1-11d65b13c2e5", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1221667646-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "99c4328f2c8c4139b4eace4b465e37e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e038615-41", "ovs_interfaceid": "6e038615-4146-41f3-9011-c4aaf6ffe845", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1908.299471] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21e634ee-fbee-461c-87c4-1a6686faa108 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.308740] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe57750e-df85-4abd-9364-dd6293972760 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.327880] env[63024]: DEBUG nova.compute.provider_tree [None 
req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1908.335832] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "37792b57-3347-4134-a060-53359afa3298" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.573s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1908.549468] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951258, 'name': CreateVM_Task, 'duration_secs': 0.441039} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.549714] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1908.550597] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.550886] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.551357] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1908.551765] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-01614e15-5f96-499f-adbc-043db92f54c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.558504] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1908.558504] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5203857c-56f0-6a07-d8eb-d0afdc3489fe" [ 1908.558504] env[63024]: _type = "Task" [ 1908.558504] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.574061] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5203857c-56f0-6a07-d8eb-d0afdc3489fe, 'name': SearchDatastore_Task, 'duration_secs': 0.013595} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1908.574388] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1908.574627] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1908.574865] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1908.575019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1908.575207] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1908.575472] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2742e5b5-5590-4819-97b2-a86dc1060d60 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.588258] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1908.588451] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1908.590710] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9db5dd71-d90a-4426-b1d9-6346625d6917 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1908.593620] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1908.598243] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1908.598243] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5288b896-a5cd-4b5c-e139-f11d536d61fe" [ 1908.598243] env[63024]: _type = "Task" [ 1908.598243] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1908.606701] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5288b896-a5cd-4b5c-e139-f11d536d61fe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.670335] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951259, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1908.803308] env[63024]: DEBUG oslo_concurrency.lockutils [req-b00d1e08-3334-4b7e-ae24-a4cc707fb047 req-a0da4f89-da88-495c-a914-62e89a4ff932 service nova] Releasing lock "refresh_cache-a0a9ea07-dda8-45b4-bab9-cdaf683c0a21" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1908.867997] env[63024]: DEBUG nova.scheduler.client.report [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 121 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1908.868239] env[63024]: DEBUG nova.compute.provider_tree [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 121 to 122 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1908.868453] env[63024]: DEBUG nova.compute.provider_tree [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1909.109804] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5288b896-a5cd-4b5c-e139-f11d536d61fe, 'name': SearchDatastore_Task, 'duration_secs': 0.012221} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.110849] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccb5c351-fc78-45aa-9115-9415171fb60b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.117209] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1909.117209] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5204595c-e6df-19d5-88e0-cc0d1177f8f8" [ 1909.117209] env[63024]: _type = "Task" [ 1909.117209] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.125826] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5204595c-e6df-19d5-88e0-cc0d1177f8f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.171509] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951259, 'name': ReconfigVM_Task, 'duration_secs': 0.563885} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.171509] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Reconfigured VM instance instance-0000004f to attach disk [datastore1] 43cdc362-588f-42cc-a4b2-a08fe60293a5/43cdc362-588f-42cc-a4b2-a08fe60293a5.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1909.172079] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84b4d98f-0e1b-4903-8aaf-4851f894e181 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.179982] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1909.179982] env[63024]: value = "task-1951261" [ 1909.179982] env[63024]: _type = "Task" [ 1909.179982] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.188915] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951261, 'name': Rename_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.377044] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.138s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.377044] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.089s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.379413] env[63024]: INFO nova.compute.claims [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1909.421617] env[63024]: INFO nova.scheduler.client.report [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Deleted allocations for instance 7cf0ac90-d87d-4644-8a88-da5328d1721d [ 1909.629196] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5204595c-e6df-19d5-88e0-cc0d1177f8f8, 'name': SearchDatastore_Task, 'duration_secs': 0.011255} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.629506] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1909.629781] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] a0a9ea07-dda8-45b4-bab9-cdaf683c0a21/a0a9ea07-dda8-45b4-bab9-cdaf683c0a21.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1909.630063] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-95b3079d-2206-46d5-9944-36c2c0998a76 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.638114] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1909.638114] env[63024]: value = "task-1951262" [ 1909.638114] env[63024]: _type = "Task" [ 1909.638114] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.646886] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951262, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.694604] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951261, 'name': Rename_Task, 'duration_secs': 0.297606} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1909.694974] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1909.695728] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5d6a9c31-e83c-45b0-99c6-b0e3ace64634 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.704725] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1909.704725] env[63024]: value = "task-1951263" [ 1909.704725] env[63024]: _type = "Task" [ 1909.704725] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1909.713776] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951263, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1909.875843] env[63024]: DEBUG oslo_concurrency.lockutils [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "01b8072a-4483-4932-8294-7e5b48e6b203" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1909.876316] env[63024]: DEBUG oslo_concurrency.lockutils [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "01b8072a-4483-4932-8294-7e5b48e6b203" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.935914] env[63024]: DEBUG oslo_concurrency.lockutils [None req-be9909d5-91df-4a2a-b197-4a7a1bb56351 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "7cf0ac90-d87d-4644-8a88-da5328d1721d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.557s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1909.935914] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "7cf0ac90-d87d-4644-8a88-da5328d1721d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 14.700s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1909.935914] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-554aaacb-ee21-42a3-b1f1-ddaf160cb764 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.946587] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6588198f-114b-40b5-9ac1-4065a75c6044 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.150423] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951262, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.189020] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.189341] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.189785] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.190061] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.190333] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.192902] env[63024]: INFO nova.compute.manager [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Terminating instance [ 1910.217978] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951263, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.256042] env[63024]: DEBUG oslo_concurrency.lockutils [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.256382] env[63024]: DEBUG oslo_concurrency.lockutils [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.256717] env[63024]: DEBUG oslo_concurrency.lockutils [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1910.256997] env[63024]: DEBUG oslo_concurrency.lockutils [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1910.257277] env[63024]: DEBUG oslo_concurrency.lockutils [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.260107] env[63024]: INFO nova.compute.manager [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Terminating instance [ 1910.379593] env[63024]: DEBUG nova.compute.utils [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1910.502604] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "7cf0ac90-d87d-4644-8a88-da5328d1721d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.567s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.654721] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 
tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951262, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.727203} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.655063] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] a0a9ea07-dda8-45b4-bab9-cdaf683c0a21/a0a9ea07-dda8-45b4-bab9-cdaf683c0a21.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1910.655367] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1910.655641] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b9baa8cb-f121-4b63-b73b-4447a34a2fb7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.667982] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1910.667982] env[63024]: value = "task-1951264" [ 1910.667982] env[63024]: _type = "Task" [ 1910.667982] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.680830] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951264, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.697391] env[63024]: DEBUG nova.compute.manager [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1910.697821] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1910.701742] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f34a4509-683a-402e-af44-6ea7d423d36e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.712110] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1910.712798] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5269099c-d5cc-4ba5-bd27-100f2761d93e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.720533] env[63024]: DEBUG oslo_vmware.api [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951263, 'name': PowerOnVM_Task, 'duration_secs': 0.622865} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1910.722007] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1910.722249] env[63024]: INFO nova.compute.manager [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Took 7.69 seconds to spawn the instance on the hypervisor. [ 1910.722448] env[63024]: DEBUG nova.compute.manager [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1910.723478] env[63024]: DEBUG oslo_vmware.api [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1910.723478] env[63024]: value = "task-1951265" [ 1910.723478] env[63024]: _type = "Task" [ 1910.723478] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.724270] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9fd634-e176-49b1-993a-41237228dd55 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.746248] env[63024]: DEBUG oslo_vmware.api [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951265, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.766315] env[63024]: DEBUG nova.compute.manager [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1910.766315] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1910.767391] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f40a4f9-b5e8-484b-b233-d9fb18d515ec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.777632] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1910.777632] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1cd9ddfb-6ab8-48ab-82b8-075c3345681c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.807100] env[63024]: DEBUG oslo_vmware.api [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1910.807100] env[63024]: value = "task-1951266" [ 1910.807100] env[63024]: _type = "Task" [ 1910.807100] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1910.821107] env[63024]: DEBUG oslo_vmware.api [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951266, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1910.839321] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-487f9ecc-3cfc-4e06-b30e-ad3475611645 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.848790] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7cfd754-bc5f-43ef-8252-9648b10da6f2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.882464] env[63024]: DEBUG oslo_concurrency.lockutils [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "01b8072a-4483-4932-8294-7e5b48e6b203" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1910.883975] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c40cf89d-3d1e-446d-b0a5-4fa1d41bc293 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.894232] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1817212f-af9e-42f7-a39e-1791d3e0149e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1910.908832] env[63024]: DEBUG nova.compute.provider_tree [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1911.180239] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951264, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.147665} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.180591] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1911.181410] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b9977f-94fa-4df5-8d74-b727305f9764 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.204428] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] a0a9ea07-dda8-45b4-bab9-cdaf683c0a21/a0a9ea07-dda8-45b4-bab9-cdaf683c0a21.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1911.204566] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-32f848d3-cab0-49f2-bfc4-8afd78a94fc2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.226897] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1911.226897] env[63024]: value = "task-1951267" [ 1911.226897] env[63024]: _type = "Task" [ 1911.226897] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.241424] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951267, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.244667] env[63024]: DEBUG oslo_vmware.api [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951265, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.253728] env[63024]: INFO nova.compute.manager [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Took 34.83 seconds to build instance. [ 1911.317349] env[63024]: DEBUG oslo_vmware.api [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951266, 'name': PowerOffVM_Task, 'duration_secs': 0.254033} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.317636] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1911.317810] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1911.318080] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf286963-553b-44c7-86ab-969335189763 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.412741] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Volume attach. Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1911.412980] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402169', 'volume_id': 'ded8b478-8973-478c-b264-5807871774d2', 'name': 'volume-ded8b478-8973-478c-b264-5807871774d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '73db94b8-cfa8-4457-bccb-d4b780edbd93', 'attached_at': '', 'detached_at': '', 'volume_id': 'ded8b478-8973-478c-b264-5807871774d2', 'serial': 'ded8b478-8973-478c-b264-5807871774d2'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1911.414040] env[63024]: DEBUG nova.scheduler.client.report [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1911.418932] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1d2681f-68ff-4d27-a634-746658b7af12 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.439970] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ef2f00f-1229-4772-b3ec-24e695b3e375 {{(pid=63024) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.470699] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] volume-ded8b478-8973-478c-b264-5807871774d2/volume-ded8b478-8973-478c-b264-5807871774d2.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1911.471532] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d91923c-e369-42cd-8df1-d65c1742d513 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.498089] env[63024]: DEBUG oslo_vmware.api [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1911.498089] env[63024]: value = "task-1951269" [ 1911.498089] env[63024]: _type = "Task" [ 1911.498089] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.510217] env[63024]: DEBUG oslo_vmware.api [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951269, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.577606] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1911.577921] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1911.578217] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Deleting the datastore file [datastore1] ac60546a-37b2-4d2a-8505-61fe202e2ed0 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1911.578621] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d0664f8c-7fad-4b4f-86dc-3916da84cfd7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.592982] env[63024]: DEBUG oslo_vmware.api [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for the task: (returnval){ [ 1911.592982] env[63024]: value = "task-1951270" [ 1911.592982] env[63024]: _type = "Task" [ 1911.592982] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.603870] env[63024]: DEBUG oslo_vmware.api [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951270, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.669290] env[63024]: DEBUG nova.compute.manager [req-b09de1bf-f6db-4ee4-82b9-1da6aecd3b09 req-18658359-314a-4ac9-a4e2-e13f5188fa05 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Received event network-changed-a7a97c50-68b5-4301-99d3-7cd47c2d96d8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1911.669769] env[63024]: DEBUG nova.compute.manager [req-b09de1bf-f6db-4ee4-82b9-1da6aecd3b09 req-18658359-314a-4ac9-a4e2-e13f5188fa05 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Refreshing instance network info cache due to event network-changed-a7a97c50-68b5-4301-99d3-7cd47c2d96d8. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1911.670239] env[63024]: DEBUG oslo_concurrency.lockutils [req-b09de1bf-f6db-4ee4-82b9-1da6aecd3b09 req-18658359-314a-4ac9-a4e2-e13f5188fa05 service nova] Acquiring lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1911.670579] env[63024]: DEBUG oslo_concurrency.lockutils [req-b09de1bf-f6db-4ee4-82b9-1da6aecd3b09 req-18658359-314a-4ac9-a4e2-e13f5188fa05 service nova] Acquired lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1911.670950] env[63024]: DEBUG nova.network.neutron [req-b09de1bf-f6db-4ee4-82b9-1da6aecd3b09 req-18658359-314a-4ac9-a4e2-e13f5188fa05 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Refreshing network info cache for port a7a97c50-68b5-4301-99d3-7cd47c2d96d8 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1911.741294] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951267, 'name': ReconfigVM_Task, 'duration_secs': 0.361664} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1911.744537] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Reconfigured VM instance instance-00000050 to attach disk [datastore1] a0a9ea07-dda8-45b4-bab9-cdaf683c0a21/a0a9ea07-dda8-45b4-bab9-cdaf683c0a21.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1911.745330] env[63024]: DEBUG oslo_vmware.api [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951265, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.745579] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d4228428-3988-4e1f-8dd8-5ecc6ead7e25 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1911.756696] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b370ed28-e6bf-484e-8185-233d0e5c8019 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "43cdc362-588f-42cc-a4b2-a08fe60293a5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.347s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.757275] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1911.757275] env[63024]: value = "task-1951271" [ 1911.757275] env[63024]: _type = "Task" [ 1911.757275] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1911.767708] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951271, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1911.924233] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.547s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1911.924775] env[63024]: DEBUG nova.compute.manager [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1911.928572] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 20.417s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.930230] env[63024]: INFO nova.compute.claims [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1911.991665] env[63024]: DEBUG oslo_concurrency.lockutils [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "01b8072a-4483-4932-8294-7e5b48e6b203" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1911.991977] env[63024]: DEBUG oslo_concurrency.lockutils [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "01b8072a-4483-4932-8294-7e5b48e6b203" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1911.992286] env[63024]: INFO nova.compute.manager [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Attaching volume c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae to /dev/sdb [ 1912.010654] env[63024]: DEBUG oslo_vmware.api [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951269, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.036247] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b70b71f9-6840-4d3f-bd1a-bbb2969fba52 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.045762] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee831fa-8935-4e1f-a4db-2a7ae02e7505 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.062127] env[63024]: DEBUG nova.virt.block_device [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updating existing volume attachment record: e7b4cc7b-8160-4878-bfac-6ecd47e7f1ba {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1912.110107] env[63024]: DEBUG oslo_vmware.api [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Task: {'id': task-1951270, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.220985} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.110107] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1912.110253] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1912.110446] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1912.110621] env[63024]: INFO nova.compute.manager [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1912.110864] env[63024]: DEBUG oslo.service.loopingcall [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1912.111080] env[63024]: DEBUG nova.compute.manager [-] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1912.111172] env[63024]: DEBUG nova.network.neutron [-] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1912.242690] env[63024]: DEBUG oslo_vmware.api [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951265, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.274477] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951271, 'name': Rename_Task, 'duration_secs': 0.205933} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.274867] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1912.275175] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f619090-6210-49ad-af4a-38e54fd68031 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.286123] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1912.286123] env[63024]: value = "task-1951273" [ 1912.286123] env[63024]: _type = "Task" [ 1912.286123] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.296229] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951273, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.394351] env[63024]: DEBUG oslo_concurrency.lockutils [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "43cdc362-588f-42cc-a4b2-a08fe60293a5" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1912.394483] env[63024]: DEBUG oslo_concurrency.lockutils [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "43cdc362-588f-42cc-a4b2-a08fe60293a5" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1912.394761] env[63024]: INFO nova.compute.manager [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Rebooting instance [ 1912.431951] env[63024]: DEBUG nova.compute.utils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1912.436187] env[63024]: DEBUG nova.compute.manager [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1912.436775] env[63024]: DEBUG nova.network.neutron [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1912.443462] env[63024]: DEBUG nova.compute.manager [req-ade42654-7703-4145-a8f2-78c39873e21d req-66f5230b-592f-4e38-8979-d80dfffa4bcd service nova] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Received event network-vif-deleted-4f25b42b-a210-4630-9dc5-b2e92c31b4f5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1912.443604] env[63024]: INFO nova.compute.manager [req-ade42654-7703-4145-a8f2-78c39873e21d req-66f5230b-592f-4e38-8979-d80dfffa4bcd service nova] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Neutron deleted interface 4f25b42b-a210-4630-9dc5-b2e92c31b4f5; detaching it from the instance and deleting it from the info cache [ 1912.443787] env[63024]: DEBUG nova.network.neutron [req-ade42654-7703-4145-a8f2-78c39873e21d req-66f5230b-592f-4e38-8979-d80dfffa4bcd service nova] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.486899] env[63024]: DEBUG nova.network.neutron [req-b09de1bf-f6db-4ee4-82b9-1da6aecd3b09 req-18658359-314a-4ac9-a4e2-e13f5188fa05 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Updated VIF entry in instance network info cache for port a7a97c50-68b5-4301-99d3-7cd47c2d96d8. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1912.487424] env[63024]: DEBUG nova.network.neutron [req-b09de1bf-f6db-4ee4-82b9-1da6aecd3b09 req-18658359-314a-4ac9-a4e2-e13f5188fa05 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Updating instance_info_cache with network_info: [{"id": "a7a97c50-68b5-4301-99d3-7cd47c2d96d8", "address": "fa:16:3e:5e:f9:6e", "network": {"id": "384d05e3-ef53-40f3-8a75-21f850df070c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-411167579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e138433d59374418952a186a4d2a0f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7a97c50-68", "ovs_interfaceid": "a7a97c50-68b5-4301-99d3-7cd47c2d96d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.501565] env[63024]: DEBUG nova.policy [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 
tempest-ServerDiskConfigTestJSON-16708605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fc112b4851e4dbeac3a69409e7bf98e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1886be852b01400aaf7a31c8fe5d4d7a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1912.514063] env[63024]: DEBUG oslo_vmware.api [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951269, 'name': ReconfigVM_Task, 'duration_secs': 0.719631} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.514384] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Reconfigured VM instance instance-00000043 to attach disk [datastore1] volume-ded8b478-8973-478c-b264-5807871774d2/volume-ded8b478-8973-478c-b264-5807871774d2.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1912.519831] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-502fd738-90fc-4c12-9033-7fa3602af0b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.541395] env[63024]: DEBUG oslo_vmware.api [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1912.541395] env[63024]: value = "task-1951274" [ 1912.541395] env[63024]: _type = "Task" [ 1912.541395] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.552773] env[63024]: DEBUG oslo_vmware.api [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951274, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.674822] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b61ae-e6ae-0c38-c0c1-3034a825833b/disk-0.vmdk. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1912.677343] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7446b3c-ca66-410b-a3e3-31131759ba16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.684397] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b61ae-e6ae-0c38-c0c1-3034a825833b/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1912.684576] env[63024]: ERROR oslo_vmware.rw_handles [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b61ae-e6ae-0c38-c0c1-3034a825833b/disk-0.vmdk due to incomplete transfer. [ 1912.684864] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f1e32573-dc1e-4f75-afba-26b6d116bebc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.694138] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b61ae-e6ae-0c38-c0c1-3034a825833b/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1912.694347] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Uploaded image 54e4ffe9-c344-412d-954f-469fecd124bc to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1912.696726] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1912.697020] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-91846efa-76e3-4cec-b1e5-fce35c9ec9f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.705431] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1912.705431] env[63024]: value = "task-1951277" [ 1912.705431] env[63024]: _type = "Task" [ 1912.705431] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.718088] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951277, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.745706] env[63024]: DEBUG oslo_vmware.api [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951265, 'name': PowerOffVM_Task, 'duration_secs': 1.8175} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1912.745706] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1912.745706] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1912.745706] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-83bbdf09-1c58-4678-b7ea-88b335e52d25 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.793786] env[63024]: DEBUG nova.network.neutron [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Successfully created port: 6aa34054-6865-4348-9871-fd32c747ab34 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1912.803073] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951273, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.851062] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1912.851320] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1912.851531] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleting the datastore file [datastore1] 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1912.851934] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81705ab1-99e5-4d50-a9a6-1cae4b967760 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.860652] env[63024]: DEBUG oslo_vmware.api [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1912.860652] env[63024]: value = "task-1951279" [ 1912.860652] env[63024]: _type = "Task" [ 1912.860652] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1912.872512] env[63024]: DEBUG oslo_vmware.api [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951279, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1912.905516] env[63024]: DEBUG nova.network.neutron [-] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1912.914911] env[63024]: DEBUG oslo_concurrency.lockutils [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1912.946375] env[63024]: DEBUG nova.compute.manager [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1912.954324] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-470e4743-27a9-4f9c-b92d-cc2e1f86f551 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.970298] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18fe0260-38d1-4711-a156-bff77b259550 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1912.991290] env[63024]: DEBUG oslo_concurrency.lockutils [req-b09de1bf-f6db-4ee4-82b9-1da6aecd3b09 req-18658359-314a-4ac9-a4e2-e13f5188fa05 service nova] Releasing lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1912.991730] env[63024]: DEBUG oslo_concurrency.lockutils [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquired lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1912.991944] env[63024]: DEBUG nova.network.neutron [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1913.015128] env[63024]: DEBUG nova.compute.manager [req-ade42654-7703-4145-a8f2-78c39873e21d req-66f5230b-592f-4e38-8979-d80dfffa4bcd service nova] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Detach interface failed, port_id=4f25b42b-a210-4630-9dc5-b2e92c31b4f5, reason: Instance ac60546a-37b2-4d2a-8505-61fe202e2ed0 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1913.056683] env[63024]: DEBUG oslo_vmware.api [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951274, 'name': ReconfigVM_Task, 'duration_secs': 0.219627} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.057026] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402169', 'volume_id': 'ded8b478-8973-478c-b264-5807871774d2', 'name': 'volume-ded8b478-8973-478c-b264-5807871774d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '73db94b8-cfa8-4457-bccb-d4b780edbd93', 'attached_at': '', 'detached_at': '', 'volume_id': 'ded8b478-8973-478c-b264-5807871774d2', 'serial': 'ded8b478-8973-478c-b264-5807871774d2'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1913.217195] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951277, 'name': Destroy_Task, 'duration_secs': 0.397346} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.217610] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Destroyed the VM [ 1913.217768] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1913.218112] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6931a307-695f-47bc-965a-036ad10a06ca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.226055] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1913.226055] env[63024]: value = "task-1951280" [ 1913.226055] env[63024]: _type = "Task" [ 1913.226055] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1913.239730] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951280, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.302607] env[63024]: DEBUG oslo_vmware.api [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951273, 'name': PowerOnVM_Task, 'duration_secs': 0.560473} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.305737] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1913.306017] env[63024]: INFO nova.compute.manager [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Took 7.56 seconds to spawn the instance on the hypervisor. [ 1913.306238] env[63024]: DEBUG nova.compute.manager [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1913.307566] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-250a2217-e276-4cab-acd6-36d548781616 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.374221] env[63024]: DEBUG oslo_vmware.api [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951279, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251177} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1913.377065] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1913.377290] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1913.377473] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1913.377651] env[63024]: INFO nova.compute.manager [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Took 2.68 seconds to destroy the instance on the hypervisor. [ 1913.377938] env[63024]: DEBUG oslo.service.loopingcall [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1913.378679] env[63024]: DEBUG nova.compute.manager [-] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1913.378796] env[63024]: DEBUG nova.network.neutron [-] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1913.408490] env[63024]: INFO nova.compute.manager [-] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Took 1.30 seconds to deallocate network for instance. [ 1913.465060] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6de2cfd-dc53-440a-8795-438fec616c0a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.475613] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce5d4f2-266a-4dd0-bc76-09e874808aac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.513112] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8753564-0611-4024-aea0-b8f7ca8c1d4d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.522819] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b93f0f3-efcf-4858-a8c5-1060c4cc16ea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.537715] env[63024]: DEBUG nova.compute.provider_tree [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1913.739390] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951280, 'name': RemoveSnapshot_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1913.771791] env[63024]: DEBUG nova.compute.manager [req-ce4ee7d3-3b18-4a58-b909-799570e9093c req-22602331-5f86-4359-b1d1-9e946bf5faf6 service nova] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Received event network-vif-deleted-360ebc0c-7601-4e8c-87a5-65b79b2ae569 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1913.772017] env[63024]: INFO nova.compute.manager [req-ce4ee7d3-3b18-4a58-b909-799570e9093c req-22602331-5f86-4359-b1d1-9e946bf5faf6 service nova] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Neutron deleted interface 360ebc0c-7601-4e8c-87a5-65b79b2ae569; detaching it from the instance and deleting it from the info cache [ 1913.772202] env[63024]: DEBUG nova.network.neutron [req-ce4ee7d3-3b18-4a58-b909-799570e9093c req-22602331-5f86-4359-b1d1-9e946bf5faf6 service nova] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.812447] env[63024]: DEBUG nova.network.neutron [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Updating instance_info_cache with network_info: [{"id": "a7a97c50-68b5-4301-99d3-7cd47c2d96d8", "address": "fa:16:3e:5e:f9:6e", "network": {"id": "384d05e3-ef53-40f3-8a75-21f850df070c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-411167579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e138433d59374418952a186a4d2a0f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7a97c50-68", "ovs_interfaceid": "a7a97c50-68b5-4301-99d3-7cd47c2d96d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1913.831872] env[63024]: INFO nova.compute.manager [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Took 35.41 seconds to build instance. 
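The entries above show the pattern that recurs throughout this log: a vSphere task (DeleteDatastoreFile_Task, RemoveSnapshot_Task, ResetVM_Task, ...) is started through oslo.vmware and then polled until it completes. The sketch below is only a rough illustration of that mechanism, assuming a reachable vCenter; the host name, credentials and datastore path are placeholders, not values from this log.

```python
from oslo_vmware import api as vmware_api

# Placeholder endpoint and credentials -- not taken from this log.
session = vmware_api.VMwareAPISession(
    'vc.example.org', 'svc-user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

vim = session.vim
# Start the server-side task; a real caller would also pass the datacenter
# managed-object reference that owns the datastore path being deleted.
task = session.invoke_api(
    vim, 'DeleteDatastoreFile_Task',
    vim.service_content.fileManager,
    name='[datastore1] some-instance/some-instance.vmdk')

# wait_for_task() polls the task object, producing lines like the
# "_poll_task ... progress is N%" / "completed successfully" entries above,
# and raises if the task ends in an error state.
session.wait_for_task(task)
```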
[ 1913.919738] env[63024]: DEBUG oslo_concurrency.lockutils [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1913.959143] env[63024]: DEBUG nova.compute.manager [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1913.987181] env[63024]: DEBUG nova.virt.hardware [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1913.987510] env[63024]: DEBUG nova.virt.hardware [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1913.987800] env[63024]: DEBUG nova.virt.hardware [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1913.988087] env[63024]: DEBUG nova.virt.hardware [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1913.988268] env[63024]: DEBUG nova.virt.hardware [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1913.988425] env[63024]: DEBUG nova.virt.hardware [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1913.988637] env[63024]: DEBUG nova.virt.hardware [None 
req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1913.988799] env[63024]: DEBUG nova.virt.hardware [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1913.988965] env[63024]: DEBUG nova.virt.hardware [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1913.989141] env[63024]: DEBUG nova.virt.hardware [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1913.989315] env[63024]: DEBUG nova.virt.hardware [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1913.990462] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8423c8ec-f497-48b0-8290-d431066653b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1913.999418] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2fb3c1-868f-4d1f-a9da-868a886a993d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.042742] env[63024]: DEBUG nova.scheduler.client.report [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1914.103054] env[63024]: DEBUG nova.objects.instance [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'flavor' on Instance uuid 73db94b8-cfa8-4457-bccb-d4b780edbd93 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1914.240756] env[63024]: DEBUG oslo_vmware.api [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 
tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951280, 'name': RemoveSnapshot_Task, 'duration_secs': 0.563185} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1914.240756] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1914.240965] env[63024]: INFO nova.compute.manager [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Took 15.73 seconds to snapshot the instance on the hypervisor. [ 1914.243409] env[63024]: DEBUG nova.network.neutron [-] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1914.274812] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1f4ea3ec-a5a9-4b89-9d18-5e482c0cec6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.285517] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933473e1-853d-4d7b-9934-0e6bfc657607 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.328452] env[63024]: DEBUG oslo_concurrency.lockutils [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Releasing lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1914.330838] env[63024]: DEBUG nova.compute.manager [req-ce4ee7d3-3b18-4a58-b909-799570e9093c req-22602331-5f86-4359-b1d1-9e946bf5faf6 service nova] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Detach interface failed, port_id=360ebc0c-7601-4e8c-87a5-65b79b2ae569, reason: Instance 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1914.331458] env[63024]: DEBUG nova.compute.manager [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1914.332263] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0285999-497d-45f5-b220-a9712e6c0c95 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.335048] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6afc5113-c910-4cdd-aec5-91535fabd1d3 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "a0a9ea07-dda8-45b4-bab9-cdaf683c0a21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.923s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.374627] env[63024]: DEBUG nova.network.neutron [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Successfully updated port: 6aa34054-6865-4348-9871-fd32c747ab34 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1914.548615] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.620s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.549177] env[63024]: DEBUG nova.compute.manager [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1914.556021] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.586s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.556021] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.556021] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.813s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.556021] env[63024]: DEBUG nova.objects.instance [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lazy-loading 'resources' on Instance uuid 9edbda30-2e28-4961-a6ad-5ab34c40ed44 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1914.584126] env[63024]: INFO nova.scheduler.client.report [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted allocations for instance 1709d916-d0c4-4706-b41b-8b0ed25f3331 [ 1914.608788] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c04691b0-0366-4fa5-8adc-17512a6dd7da tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.340s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.638501] env[63024]: DEBUG nova.compute.manager [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Received event network-vif-plugged-6aa34054-6865-4348-9871-fd32c747ab34 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1914.638718] env[63024]: DEBUG oslo_concurrency.lockutils [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] Acquiring lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.638937] env[63024]: DEBUG oslo_concurrency.lockutils [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] Lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.639178] env[63024]: DEBUG oslo_concurrency.lockutils [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] Lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1914.639282] env[63024]: DEBUG nova.compute.manager [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] No waiting events found dispatching network-vif-plugged-6aa34054-6865-4348-9871-fd32c747ab34 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1914.639442] env[63024]: WARNING nova.compute.manager [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Received unexpected event network-vif-plugged-6aa34054-6865-4348-9871-fd32c747ab34 for instance with vm_state building and task_state spawning. [ 1914.639698] env[63024]: DEBUG nova.compute.manager [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Received event network-changed-6aa34054-6865-4348-9871-fd32c747ab34 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1914.639792] env[63024]: DEBUG nova.compute.manager [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Refreshing instance network info cache due to event network-changed-6aa34054-6865-4348-9871-fd32c747ab34. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1914.639976] env[63024]: DEBUG oslo_concurrency.lockutils [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] Acquiring lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.640277] env[63024]: DEBUG oslo_concurrency.lockutils [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] Acquired lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1914.640464] env[63024]: DEBUG nova.network.neutron [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Refreshing network info cache for port 6aa34054-6865-4348-9871-fd32c747ab34 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1914.748661] env[63024]: INFO nova.compute.manager [-] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Took 1.37 seconds to deallocate network for instance. 
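The "Acquiring lock ... by ...", "acquired ... waited Ns" and "released ... held Ns" entries above are emitted by oslo.concurrency's lockutils helpers, which serialize sections such as the resource tracker's compute_resources updates and the per-instance event queues. A minimal sketch of both forms follows; the lock name matches the log, but the decorated function body is only illustrative.

```python
from oslo_concurrency import lockutils

# Decorator form: the body runs with the in-process lock held, and lockutils
# logs the "acquired ... waited" / "released ... held" DEBUG lines seen above.
@lockutils.synchronized('compute_resources')
def update_usage():
    pass  # resource-tracker style bookkeeping would happen here

update_usage()

# Context-manager form: this produces the plain "Acquiring lock" /
# "Releasing lock" lines, like the refresh_cache-* locks in this log.
with lockutils.lock('compute_resources'):
    pass  # critical section
```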
[ 1914.809277] env[63024]: DEBUG nova.compute.manager [None req-cf1c54d3-69e5-4078-8a17-191dcc48061f tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Found 1 images (rotation: 2) {{(pid=63024) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4884}} [ 1914.876993] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1914.981673] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fcdbdef4-fdb8-4153-a99a-78bd51f52d8e tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1914.982090] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fcdbdef4-fdb8-4153-a99a-78bd51f52d8e tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1914.982316] env[63024]: DEBUG nova.compute.manager [None req-fcdbdef4-fdb8-4153-a99a-78bd51f52d8e tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1914.983299] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa9cbd9-3b79-4bdc-a8eb-85e4430a7f53 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1914.991867] env[63024]: DEBUG nova.compute.manager [None req-fcdbdef4-fdb8-4153-a99a-78bd51f52d8e tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63024) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1914.991867] env[63024]: DEBUG nova.objects.instance [None req-fcdbdef4-fdb8-4153-a99a-78bd51f52d8e tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'flavor' on Instance uuid 73db94b8-cfa8-4457-bccb-d4b780edbd93 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1915.060146] env[63024]: DEBUG nova.compute.utils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1915.065256] env[63024]: DEBUG nova.compute.manager [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 
tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1915.065256] env[63024]: DEBUG nova.network.neutron [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1915.092221] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4438d070-28f0-4340-800d-87807eee6378 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.442s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.093400] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 19.859s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1915.093551] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff5a52aa-1e18-4564-97c7-0b0fbc43d55b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.109561] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504c3e1d-17df-4585-9787-dc9776fe7213 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.124646] env[63024]: DEBUG nova.policy [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1efa94ebfd9143d7bb129313b3e3d5d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7fc70d467714e59b3c171a308feafdf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1915.199215] env[63024]: DEBUG nova.network.neutron [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1915.260502] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1915.337786] env[63024]: DEBUG nova.network.neutron [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1915.351401] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a257aa6-407f-429d-abcc-18254bd17450 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.363957] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Doing hard reboot of VM {{(pid=63024) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1915.364286] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-5914491c-ce67-4c1f-a69d-427bbb0f028f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.373639] env[63024]: DEBUG oslo_vmware.api [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1915.373639] env[63024]: value = "task-1951282" [ 1915.373639] env[63024]: _type = "Task" [ 1915.373639] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1915.384793] env[63024]: DEBUG oslo_vmware.api [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951282, 'name': ResetVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1915.442646] env[63024]: DEBUG nova.network.neutron [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Successfully created port: e916ba27-f6c3-4efb-9a22-64b761547830 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1915.540199] env[63024]: DEBUG nova.compute.manager [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1915.541511] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15fdf8f2-07f3-41c0-8f76-bcc36c89d36c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.564675] env[63024]: DEBUG nova.compute.manager [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1915.590336] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2567c4ec-37ed-4aa3-af90-08378cf2c45b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.599943] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad78accb-9b84-4eab-a355-b34abf0777f2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.636844] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227a7e8d-2622-4d39-8045-aaa887897de0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.646480] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c56365-5375-4e27-84ab-020dc62012dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.662380] env[63024]: DEBUG nova.compute.provider_tree [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1915.669018] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "1709d916-d0c4-4706-b41b-8b0ed25f3331" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.576s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1915.755741] env[63024]: DEBUG nova.compute.manager [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 
tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1915.756742] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88d87eac-b50c-4945-99ea-637f44507b01 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1915.844574] env[63024]: DEBUG oslo_concurrency.lockutils [req-1991eba8-6e25-4de5-8f02-009680716593 req-01738247-0080-4ac6-846f-8a7475ba71e6 service nova] Releasing lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1915.845018] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1915.845192] env[63024]: DEBUG nova.network.neutron [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1915.883869] env[63024]: DEBUG oslo_vmware.api [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951282, 'name': ResetVM_Task, 'duration_secs': 0.12463} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1915.884163] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Did hard reboot of VM {{(pid=63024) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1915.885300] env[63024]: DEBUG nova.compute.manager [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1915.885300] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfba95b-c75f-4d30-baa8-1e835a52eaf3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.002223] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcdbdef4-fdb8-4153-a99a-78bd51f52d8e tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1916.002600] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32d1ff0c-2413-4159-b7cf-deb16fc84919 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.012761] env[63024]: DEBUG oslo_vmware.api [None req-fcdbdef4-fdb8-4153-a99a-78bd51f52d8e tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1916.012761] env[63024]: value = "task-1951283" [ 1916.012761] env[63024]: _type = "Task" [ 1916.012761] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.021713] env[63024]: DEBUG oslo_vmware.api [None req-fcdbdef4-fdb8-4153-a99a-78bd51f52d8e tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951283, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.059306] env[63024]: INFO nova.compute.manager [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] instance snapshotting [ 1916.062635] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ee0789-89e4-4f81-90e1-fe08bb055a1d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.089340] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da173eb-51e5-4c12-b5d5-7f1dfd0f3c11 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.166009] env[63024]: DEBUG nova.scheduler.client.report [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1916.269086] env[63024]: INFO nova.compute.manager [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] instance snapshotting [ 1916.269735] env[63024]: DEBUG nova.objects.instance [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'flavor' on Instance uuid fe6847e2-a742-4338-983f-698c13aaefde {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1916.390586] env[63024]: DEBUG nova.network.neutron [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1916.401952] env[63024]: DEBUG oslo_concurrency.lockutils [None req-78042208-3c77-4204-afc6-2993bff06578 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "43cdc362-588f-42cc-a4b2-a08fe60293a5" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.007s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.526706] env[63024]: DEBUG oslo_vmware.api [None req-fcdbdef4-fdb8-4153-a99a-78bd51f52d8e tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951283, 'name': PowerOffVM_Task, 'duration_secs': 0.500133} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1916.526997] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fcdbdef4-fdb8-4153-a99a-78bd51f52d8e tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1916.527222] env[63024]: DEBUG nova.compute.manager [None req-fcdbdef4-fdb8-4153-a99a-78bd51f52d8e tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1916.527999] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d542565a-b21c-4fc2-a928-85656a64d8b7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.589675] env[63024]: DEBUG nova.compute.manager [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1916.601713] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1916.602359] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e9c1bc36-b20f-422c-b439-92859f638d91 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.612401] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1916.612401] env[63024]: value = "task-1951284" [ 1916.612401] env[63024]: _type = "Task" [ 1916.612401] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1916.618410] env[63024]: DEBUG nova.virt.hardware [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1916.618666] env[63024]: DEBUG nova.virt.hardware [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1916.618905] env[63024]: DEBUG nova.virt.hardware [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1916.619007] env[63024]: DEBUG nova.virt.hardware [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1916.619161] env[63024]: DEBUG nova.virt.hardware [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1916.619310] env[63024]: DEBUG nova.virt.hardware [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1916.619576] env[63024]: DEBUG nova.virt.hardware [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1916.619827] env[63024]: DEBUG nova.virt.hardware [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1916.619896] env[63024]: DEBUG nova.virt.hardware [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1916.620077] env[63024]: DEBUG nova.virt.hardware [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1916.620284] env[63024]: DEBUG nova.virt.hardware [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1916.621497] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4c95bd-3462-492a-ae58-5c81a4c9cce8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.636976] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f4707a6-50f6-4144-b396-29fef3e179b2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.642967] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951284, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1916.657099] env[63024]: DEBUG nova.network.neutron [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance_info_cache with network_info: [{"id": "6aa34054-6865-4348-9871-fd32c747ab34", "address": "fa:16:3e:82:1f:7b", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aa34054-68", "ovs_interfaceid": "6aa34054-6865-4348-9871-fd32c747ab34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1916.671295] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.116s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.673739] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.179s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.675272] env[63024]: INFO nova.compute.claims [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1916.707035] env[63024]: INFO nova.scheduler.client.report [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Deleted allocations for instance 9edbda30-2e28-4961-a6ad-5ab34c40ed44 [ 1916.778232] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4872253-2dbd-4db5-bbb6-f37221e85257 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.818107] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-739acfa5-5d6f-4722-8ca6-a9e88a2ae491 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1916.921789] env[63024]: DEBUG nova.compute.manager [req-6a6c8f2c-a2f0-41de-be24-e73e6c09209c req-2b18d2f8-c4c3-4430-a572-9268be94fa9a service nova] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Received event network-vif-plugged-e916ba27-f6c3-4efb-9a22-64b761547830 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1916.922065] env[63024]: DEBUG oslo_concurrency.lockutils [req-6a6c8f2c-a2f0-41de-be24-e73e6c09209c req-2b18d2f8-c4c3-4430-a572-9268be94fa9a service nova] Acquiring lock "6e0aa58b-85e0-4e74-812f-cc01041ed6d3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1916.922292] env[63024]: DEBUG oslo_concurrency.lockutils [req-6a6c8f2c-a2f0-41de-be24-e73e6c09209c req-2b18d2f8-c4c3-4430-a572-9268be94fa9a service nova] Lock "6e0aa58b-85e0-4e74-812f-cc01041ed6d3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1916.922462] env[63024]: DEBUG oslo_concurrency.lockutils [req-6a6c8f2c-a2f0-41de-be24-e73e6c09209c req-2b18d2f8-c4c3-4430-a572-9268be94fa9a service nova] Lock "6e0aa58b-85e0-4e74-812f-cc01041ed6d3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1916.922640] env[63024]: DEBUG nova.compute.manager [req-6a6c8f2c-a2f0-41de-be24-e73e6c09209c req-2b18d2f8-c4c3-4430-a572-9268be94fa9a service nova] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] No waiting events found dispatching network-vif-plugged-e916ba27-f6c3-4efb-9a22-64b761547830 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1916.922941] env[63024]: WARNING nova.compute.manager [req-6a6c8f2c-a2f0-41de-be24-e73e6c09209c req-2b18d2f8-c4c3-4430-a572-9268be94fa9a service nova] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Received unexpected event network-vif-plugged-e916ba27-f6c3-4efb-9a22-64b761547830 for instance with vm_state building and task_state spawning. 
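The network-vif-plugged handling above follows a wait-for-event pattern: a spawning request can register interest in an external event, and an event that arrives with no registered waiter is logged as unexpected ("No waiting events found dispatching ..."). The sketch below is an illustrative reconstruction of that pattern, not Nova's actual implementation; only the instance UUID and port ID are taken from the log.

```python
import threading

class InstanceEvents:
    """Illustrative registry of per-instance events a spawn may wait on."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters.pop((instance_uuid, event_name), None)

events = InstanceEvents()
# The notification from Neutron arrives before anyone registered interest:
waiter = events.pop('6e0aa58b-85e0-4e74-812f-cc01041ed6d3',
                    'network-vif-plugged-e916ba27-f6c3-4efb-9a22-64b761547830')
if waiter is None:
    # Mirrors the "No waiting events found dispatching ..." /
    # "Received unexpected event ..." lines above.
    print('no waiter registered; event treated as unexpected')
else:
    waiter.set()
```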
[ 1917.045206] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fcdbdef4-fdb8-4153-a99a-78bd51f52d8e tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.063s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.047477] env[63024]: DEBUG nova.network.neutron [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Successfully updated port: e916ba27-f6c3-4efb-9a22-64b761547830 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1917.118687] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Volume attach. Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1917.119211] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402172', 'volume_id': 'c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae', 'name': 'volume-c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '01b8072a-4483-4932-8294-7e5b48e6b203', 'attached_at': '', 'detached_at': '', 'volume_id': 'c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae', 'serial': 'c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1917.119878] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b43377-294c-4ebb-9127-df02a956f255 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.142611] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951284, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.144691] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ca93f2-f3d6-45e7-a57e-3ae40243a14f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.178626] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1917.179157] env[63024]: DEBUG nova.compute.manager [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Instance network_info: |[{"id": "6aa34054-6865-4348-9871-fd32c747ab34", "address": "fa:16:3e:82:1f:7b", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aa34054-68", "ovs_interfaceid": "6aa34054-6865-4348-9871-fd32c747ab34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1917.192865] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] volume-c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae/volume-c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1917.197275] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:82:1f:7b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6aa34054-6865-4348-9871-fd32c747ab34', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1917.206101] env[63024]: DEBUG 
oslo.service.loopingcall [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1917.206824] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa4c5077-ff9c-403d-878b-445ce988d487 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.222959] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1917.226988] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b6261d09-9b65-4470-bf9f-663330f30e1c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.602s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.228191] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bcbaebab-216d-48e4-a111-53cbf8554e60 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.250544] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 22.012s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1917.251675] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ad76b2c-4695-46a6-aa58-1904a9d1b496 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.258705] env[63024]: DEBUG oslo_vmware.api [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1917.258705] env[63024]: value = "task-1951285" [ 1917.258705] env[63024]: _type = "Task" [ 1917.258705] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.267341] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1917.267341] env[63024]: value = "task-1951286" [ 1917.267341] env[63024]: _type = "Task" [ 1917.267341] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.271167] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aecaf8f-a025-4a21-888e-95d59abb2c70 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.291483] env[63024]: DEBUG oslo_vmware.api [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951285, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.301243] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951286, 'name': CreateVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.339309] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1917.340727] env[63024]: DEBUG nova.compute.manager [req-4cc8343a-51c7-41b5-b002-b221fdefc233 req-15d0f7e0-ca3f-4fc5-823f-7c304cf18617 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Received event network-changed-a7a97c50-68b5-4301-99d3-7cd47c2d96d8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1917.340996] env[63024]: DEBUG nova.compute.manager [req-4cc8343a-51c7-41b5-b002-b221fdefc233 req-15d0f7e0-ca3f-4fc5-823f-7c304cf18617 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Refreshing instance network info cache due to event network-changed-a7a97c50-68b5-4301-99d3-7cd47c2d96d8. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1917.341279] env[63024]: DEBUG oslo_concurrency.lockutils [req-4cc8343a-51c7-41b5-b002-b221fdefc233 req-15d0f7e0-ca3f-4fc5-823f-7c304cf18617 service nova] Acquiring lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.341474] env[63024]: DEBUG oslo_concurrency.lockutils [req-4cc8343a-51c7-41b5-b002-b221fdefc233 req-15d0f7e0-ca3f-4fc5-823f-7c304cf18617 service nova] Acquired lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.341845] env[63024]: DEBUG nova.network.neutron [req-4cc8343a-51c7-41b5-b002-b221fdefc233 req-15d0f7e0-ca3f-4fc5-823f-7c304cf18617 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Refreshing network info cache for port a7a97c50-68b5-4301-99d3-7cd47c2d96d8 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1917.343534] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e885befa-e4a4-4c99-b319-4589837a77d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.356468] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1917.356468] env[63024]: value = "task-1951287" [ 1917.356468] env[63024]: _type = "Task" [ 1917.356468] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.369539] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951287, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.555492] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "refresh_cache-6e0aa58b-85e0-4e74-812f-cc01041ed6d3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.555492] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquired lock "refresh_cache-6e0aa58b-85e0-4e74-812f-cc01041ed6d3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.555492] env[63024]: DEBUG nova.network.neutron [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1917.625044] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951284, 'name': CreateSnapshot_Task, 'duration_secs': 0.949962} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.625299] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1917.626100] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a3ff45-0bd4-4906-b858-ada9909a3e91 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.651986] env[63024]: DEBUG nova.objects.instance [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'flavor' on Instance uuid 73db94b8-cfa8-4457-bccb-d4b780edbd93 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1917.777644] env[63024]: DEBUG oslo_vmware.api [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951285, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.786304] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951286, 'name': CreateVM_Task, 'duration_secs': 0.51055} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1917.786650] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1917.787280] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1917.787549] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1917.787803] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1917.788085] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c2d0874-3c54-43fc-87a3-1e5982242713 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1917.794057] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1917.794057] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525991f3-01dd-e053-5aea-652c2f7120b1" [ 1917.794057] env[63024]: _type = "Task" [ 1917.794057] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1917.806744] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525991f3-01dd-e053-5aea-652c2f7120b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1917.844387] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "9edbda30-2e28-4961-a6ad-5ab34c40ed44" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.594s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1917.870316] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951287, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.064530] env[63024]: DEBUG nova.network.neutron [req-4cc8343a-51c7-41b5-b002-b221fdefc233 req-15d0f7e0-ca3f-4fc5-823f-7c304cf18617 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Updated VIF entry in instance network info cache for port a7a97c50-68b5-4301-99d3-7cd47c2d96d8. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1918.064908] env[63024]: DEBUG nova.network.neutron [req-4cc8343a-51c7-41b5-b002-b221fdefc233 req-15d0f7e0-ca3f-4fc5-823f-7c304cf18617 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Updating instance_info_cache with network_info: [{"id": "a7a97c50-68b5-4301-99d3-7cd47c2d96d8", "address": "fa:16:3e:5e:f9:6e", "network": {"id": "384d05e3-ef53-40f3-8a75-21f850df070c", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-411167579-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e138433d59374418952a186a4d2a0f78", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0a76279-3c11-4bef-b124-2a2ee13fa377", "external-id": "nsx-vlan-transportzone-738", "segmentation_id": 738, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa7a97c50-68", "ovs_interfaceid": "a7a97c50-68b5-4301-99d3-7cd47c2d96d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1918.103606] env[63024]: DEBUG nova.network.neutron [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1918.112393] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b04bd6-c242-4ba3-b9f4-017db050f062 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.122860] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7afce886-617d-4131-b03f-518ac3f1f856 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.158563] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1918.163112] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-80465dc0-8830-437d-8c5a-a87b4d488a53 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.166046] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "43cdc362-588f-42cc-a4b2-a08fe60293a5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.166268] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "43cdc362-588f-42cc-a4b2-a08fe60293a5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.166469] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "43cdc362-588f-42cc-a4b2-a08fe60293a5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.166653] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "43cdc362-588f-42cc-a4b2-a08fe60293a5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.166817] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "43cdc362-588f-42cc-a4b2-a08fe60293a5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1918.168865] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d3dea9-0e08-4668-89d8-1e8d2321bac9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.171257] env[63024]: INFO nova.compute.manager [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Terminating instance [ 1918.172680] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.172888] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquired lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.173083] env[63024]: DEBUG nova.network.neutron [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1918.173260] env[63024]: DEBUG nova.objects.instance [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'info_cache' on Instance uuid 73db94b8-cfa8-4457-bccb-d4b780edbd93 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1918.183908] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646ddcae-d71e-453d-bca1-3880ae5af47c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.187858] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1918.187858] env[63024]: value = "task-1951288" [ 1918.187858] env[63024]: _type = "Task" [ 1918.187858] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.202078] env[63024]: DEBUG nova.compute.provider_tree [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1918.208536] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951288, 'name': CloneVM_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.270874] env[63024]: DEBUG oslo_vmware.api [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951285, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.307656] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525991f3-01dd-e053-5aea-652c2f7120b1, 'name': SearchDatastore_Task, 'duration_secs': 0.019944} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.308107] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.308516] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1918.308577] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1918.308784] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1918.308905] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1918.309254] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41a9a3ef-0009-4f9c-bd4f-2d1e6d9115ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.313388] env[63024]: DEBUG nova.network.neutron [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Updating instance_info_cache with 
network_info: [{"id": "e916ba27-f6c3-4efb-9a22-64b761547830", "address": "fa:16:3e:2b:83:5b", "network": {"id": "2f5cbe03-419b-4995-837c-3389d94c2be3", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1864944898-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7fc70d467714e59b3c171a308feafdf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape916ba27-f6", "ovs_interfaceid": "e916ba27-f6c3-4efb-9a22-64b761547830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1918.321275] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1918.321483] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1918.322320] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-267f67d8-c0d0-4c72-b466-a3328fce3400 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.329369] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1918.329369] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522b2fe0-4b6e-7bc4-f0c4-026c8e58fdd8" [ 1918.329369] env[63024]: _type = "Task" [ 1918.329369] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.343160] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522b2fe0-4b6e-7bc4-f0c4-026c8e58fdd8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.357797] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9ee321f0-5683-4c92-9fa8-4c9828294ed3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "interface-9a7f4452-ae50-4779-8474-11d3a6d3533f-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1918.358028] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9ee321f0-5683-4c92-9fa8-4c9828294ed3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-9a7f4452-ae50-4779-8474-11d3a6d3533f-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1918.358362] env[63024]: DEBUG nova.objects.instance [None req-9ee321f0-5683-4c92-9fa8-4c9828294ed3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'flavor' on Instance uuid 9a7f4452-ae50-4779-8474-11d3a6d3533f {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1918.369959] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951287, 'name': CreateSnapshot_Task, 'duration_secs': 0.589627} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.369959] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1918.370187] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34219e26-0a97-4da2-8435-6eb84fa9ce39 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.568295] env[63024]: DEBUG oslo_concurrency.lockutils [req-4cc8343a-51c7-41b5-b002-b221fdefc233 req-15d0f7e0-ca3f-4fc5-823f-7c304cf18617 service nova] Releasing lock "refresh_cache-43cdc362-588f-42cc-a4b2-a08fe60293a5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.676575] env[63024]: DEBUG nova.objects.base [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Object Instance<73db94b8-cfa8-4457-bccb-d4b780edbd93> lazy-loaded attributes: flavor,info_cache {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1918.678281] env[63024]: DEBUG nova.compute.manager [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1918.678398] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1918.679414] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ee74c7-59bc-4d8c-a079-8ab49ae1b322 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.687907] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1918.688668] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6f0ee055-efcd-40ba-9ea1-fb641059172c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.701043] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951288, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.702389] env[63024]: DEBUG oslo_vmware.api [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1918.702389] env[63024]: value = "task-1951289" [ 1918.702389] env[63024]: _type = "Task" [ 1918.702389] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.706282] env[63024]: DEBUG nova.scheduler.client.report [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1918.714298] env[63024]: DEBUG oslo_vmware.api [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951289, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.770476] env[63024]: DEBUG oslo_vmware.api [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951285, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.816116] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Releasing lock "refresh_cache-6e0aa58b-85e0-4e74-812f-cc01041ed6d3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1918.816543] env[63024]: DEBUG nova.compute.manager [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Instance network_info: |[{"id": "e916ba27-f6c3-4efb-9a22-64b761547830", "address": "fa:16:3e:2b:83:5b", "network": {"id": "2f5cbe03-419b-4995-837c-3389d94c2be3", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1864944898-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7fc70d467714e59b3c171a308feafdf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape916ba27-f6", "ovs_interfaceid": "e916ba27-f6c3-4efb-9a22-64b761547830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1918.817015] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:83:5b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0d2101e-2d93-4310-a242-af2d9ecdaf9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e916ba27-f6c3-4efb-9a22-64b761547830', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1918.824835] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Creating folder: Project (a7fc70d467714e59b3c171a308feafdf). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1918.825157] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d42cf82c-19be-42c5-8ae1-17e89bc3f505 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.840451] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522b2fe0-4b6e-7bc4-f0c4-026c8e58fdd8, 'name': SearchDatastore_Task, 'duration_secs': 0.020552} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1918.841882] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Created folder: Project (a7fc70d467714e59b3c171a308feafdf) in parent group-v401959. [ 1918.842067] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Creating folder: Instances. Parent ref: group-v402177. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1918.842285] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80740ca3-2fb1-4240-94d1-8da854e5c167 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.844655] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f3aa8ec1-1b58-45d9-8663-da9898a5edac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.850857] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1918.850857] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526ad506-be5e-75d7-c0ab-73327f0a7f13" [ 1918.850857] env[63024]: _type = "Task" [ 1918.850857] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.857103] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Created folder: Instances in parent group-v402177. [ 1918.857341] env[63024]: DEBUG oslo.service.loopingcall [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1918.857882] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1918.858130] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0a530a55-034b-4b42-928d-4a834133e122 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.876891] env[63024]: DEBUG nova.objects.instance [None req-9ee321f0-5683-4c92-9fa8-4c9828294ed3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'pci_requests' on Instance uuid 9a7f4452-ae50-4779-8474-11d3a6d3533f {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1918.878100] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526ad506-be5e-75d7-c0ab-73327f0a7f13, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.890351] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1918.893266] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e3e9f0a7-6f07-4842-81dd-de72a1ede022 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1918.898318] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1918.898318] env[63024]: value = "task-1951292" [ 1918.898318] env[63024]: _type = "Task" [ 1918.898318] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.906686] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1918.906686] env[63024]: value = "task-1951293" [ 1918.906686] env[63024]: _type = "Task" [ 1918.906686] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1918.910608] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951292, 'name': CreateVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1918.922010] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951293, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.082349] env[63024]: DEBUG nova.compute.manager [req-e0a55823-c336-470f-bb63-6f27440258d0 req-a7aa0859-1223-41f2-b93c-56c1364455aa service nova] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Received event network-changed-e916ba27-f6c3-4efb-9a22-64b761547830 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1919.082579] env[63024]: DEBUG nova.compute.manager [req-e0a55823-c336-470f-bb63-6f27440258d0 req-a7aa0859-1223-41f2-b93c-56c1364455aa service nova] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Refreshing instance network info cache due to event network-changed-e916ba27-f6c3-4efb-9a22-64b761547830. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1919.082860] env[63024]: DEBUG oslo_concurrency.lockutils [req-e0a55823-c336-470f-bb63-6f27440258d0 req-a7aa0859-1223-41f2-b93c-56c1364455aa service nova] Acquiring lock "refresh_cache-6e0aa58b-85e0-4e74-812f-cc01041ed6d3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.083040] env[63024]: DEBUG oslo_concurrency.lockutils [req-e0a55823-c336-470f-bb63-6f27440258d0 req-a7aa0859-1223-41f2-b93c-56c1364455aa service nova] Acquired lock "refresh_cache-6e0aa58b-85e0-4e74-812f-cc01041ed6d3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.083218] env[63024]: DEBUG nova.network.neutron [req-e0a55823-c336-470f-bb63-6f27440258d0 req-a7aa0859-1223-41f2-b93c-56c1364455aa service nova] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Refreshing network info cache for port e916ba27-f6c3-4efb-9a22-64b761547830 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1919.201239] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951288, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.211194] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.537s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.211739] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1919.217138] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.652s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1919.218548] env[63024]: INFO nova.compute.claims [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1919.221639] env[63024]: DEBUG oslo_vmware.api [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951289, 'name': PowerOffVM_Task, 'duration_secs': 0.280834} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.222288] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1919.222393] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1919.222893] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3048fe6f-e7f5-48fb-92df-1b45c374a2a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.274372] env[63024]: DEBUG oslo_vmware.api [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951285, 'name': ReconfigVM_Task, 'duration_secs': 1.590146} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.274759] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Reconfigured VM instance instance-00000048 to attach disk [datastore1] volume-c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae/volume-c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1919.279839] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-669cb4c3-4627-4b34-97b8-cf592a549216 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.301149] env[63024]: DEBUG oslo_vmware.api [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1919.301149] env[63024]: value = "task-1951295" [ 1919.301149] env[63024]: _type = "Task" [ 1919.301149] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.315781] env[63024]: DEBUG oslo_vmware.api [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951295, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.365643] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526ad506-be5e-75d7-c0ab-73327f0a7f13, 'name': SearchDatastore_Task, 'duration_secs': 0.020696} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.366224] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.366369] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9e8e7b6e-1bb2-4e66-b734-2f56e31302af/9e8e7b6e-1bb2-4e66-b734-2f56e31302af.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1919.366566] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f7bff79-2967-4208-b716-b616091e4e52 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.376747] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1919.376747] env[63024]: value = "task-1951296" [ 1919.376747] env[63024]: _type = "Task" [ 1919.376747] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.380470] env[63024]: DEBUG nova.objects.base [None req-9ee321f0-5683-4c92-9fa8-4c9828294ed3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Object Instance<9a7f4452-ae50-4779-8474-11d3a6d3533f> lazy-loaded attributes: flavor,pci_requests {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1919.380682] env[63024]: DEBUG nova.network.neutron [None req-9ee321f0-5683-4c92-9fa8-4c9828294ed3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1919.388244] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951296, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.409471] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951292, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.421682] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951293, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.489142] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9ee321f0-5683-4c92-9fa8-4c9828294ed3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-9a7f4452-ae50-4779-8474-11d3a6d3533f-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.131s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1919.493530] env[63024]: DEBUG nova.network.neutron [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Updating instance_info_cache with network_info: [{"id": "f78f097c-0df1-4f4f-8941-cf21c2b2ca4b", "address": "fa:16:3e:df:09:6b", "network": {"id": "83ed1c04-a2e0-4c15-ae35-68e988607ce4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-470202335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb1fcc9fd945cb9f4477fe1cce3f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf78f097c-0d", "ovs_interfaceid": "f78f097c-0df1-4f4f-8941-cf21c2b2ca4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.501322] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1919.501543] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1919.501806] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Deleting the datastore file [datastore1] 43cdc362-588f-42cc-a4b2-a08fe60293a5 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1919.502254] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e5f64088-f852-43db-b33f-a30743b7e10d {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.512893] env[63024]: DEBUG oslo_vmware.api [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1919.512893] env[63024]: value = "task-1951297" [ 1919.512893] env[63024]: _type = "Task" [ 1919.512893] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.527746] env[63024]: DEBUG oslo_vmware.api [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951297, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.704240] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951288, 'name': CloneVM_Task} progress is 95%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.727713] env[63024]: DEBUG nova.compute.utils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1919.729833] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1919.730082] env[63024]: DEBUG nova.network.neutron [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1919.780717] env[63024]: DEBUG nova.policy [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d3f9605a2384a919157a571cd164859', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1166551532c473ca470379b16664513', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1919.820915] env[63024]: DEBUG oslo_vmware.api [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951295, 'name': ReconfigVM_Task, 'duration_secs': 0.180578} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.821320] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402172', 'volume_id': 'c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae', 'name': 'volume-c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '01b8072a-4483-4932-8294-7e5b48e6b203', 'attached_at': '', 'detached_at': '', 'volume_id': 'c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae', 'serial': 'c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1919.849139] env[63024]: DEBUG nova.network.neutron [req-e0a55823-c336-470f-bb63-6f27440258d0 req-a7aa0859-1223-41f2-b93c-56c1364455aa service nova] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Updated VIF entry in instance network info cache for port e916ba27-f6c3-4efb-9a22-64b761547830. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1919.849415] env[63024]: DEBUG nova.network.neutron [req-e0a55823-c336-470f-bb63-6f27440258d0 req-a7aa0859-1223-41f2-b93c-56c1364455aa service nova] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Updating instance_info_cache with network_info: [{"id": "e916ba27-f6c3-4efb-9a22-64b761547830", "address": "fa:16:3e:2b:83:5b", "network": {"id": "2f5cbe03-419b-4995-837c-3389d94c2be3", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1864944898-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7fc70d467714e59b3c171a308feafdf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape916ba27-f6", "ovs_interfaceid": "e916ba27-f6c3-4efb-9a22-64b761547830", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1919.890227] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951296, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.910879] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951292, 'name': CreateVM_Task, 'duration_secs': 0.752422} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.911160] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1919.914929] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.915216] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.915480] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1919.915754] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-620e8740-d320-4d3d-99ff-b5cecf5646c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.925358] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951293, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.926558] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1919.926558] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f027c6-31b8-be49-273b-f63e73419996" [ 1919.926558] env[63024]: _type = "Task" [ 1919.926558] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.936910] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f027c6-31b8-be49-273b-f63e73419996, 'name': SearchDatastore_Task, 'duration_secs': 0.010699} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1919.937244] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1919.937489] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1919.937836] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1919.937923] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1919.938071] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1919.938321] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01697cd3-d928-4715-b3fc-cbbe4a3885da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.947392] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1919.947581] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1919.948326] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c5d0ac94-cd2d-4a6b-890e-58060516daa8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1919.957796] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1919.957796] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521d5020-4b36-3083-6664-446b970e510c" [ 1919.957796] env[63024]: _type = "Task" [ 1919.957796] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1919.966862] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521d5020-4b36-3083-6664-446b970e510c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1919.996732] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Releasing lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.023833] env[63024]: DEBUG oslo_vmware.api [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951297, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.505063} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.024105] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1920.024296] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1920.024474] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1920.024643] env[63024]: INFO nova.compute.manager [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Took 1.35 seconds to destroy the instance on the hypervisor. 
[ 1920.024882] env[63024]: DEBUG oslo.service.loopingcall [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1920.025336] env[63024]: DEBUG nova.compute.manager [-] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1920.025443] env[63024]: DEBUG nova.network.neutron [-] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1920.066361] env[63024]: DEBUG nova.network.neutron [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Successfully created port: 6bc8e59c-a429-442e-a0c8-d59867ad2ad9 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1920.201958] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951288, 'name': CloneVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.233945] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1920.267542] env[63024]: DEBUG oslo_concurrency.lockutils [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "9267e5e4-732d-47f1-8a30-d926a1269fb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.267980] env[63024]: DEBUG oslo_concurrency.lockutils [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "9267e5e4-732d-47f1-8a30-d926a1269fb9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.268380] env[63024]: DEBUG oslo_concurrency.lockutils [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "9267e5e4-732d-47f1-8a30-d926a1269fb9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1920.268780] env[63024]: DEBUG oslo_concurrency.lockutils [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "9267e5e4-732d-47f1-8a30-d926a1269fb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1920.268990] env[63024]: DEBUG oslo_concurrency.lockutils [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "9267e5e4-732d-47f1-8a30-d926a1269fb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1920.271933] env[63024]: INFO nova.compute.manager [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Terminating instance [ 1920.351998] env[63024]: DEBUG oslo_concurrency.lockutils [req-e0a55823-c336-470f-bb63-6f27440258d0 req-a7aa0859-1223-41f2-b93c-56c1364455aa service nova] Releasing lock "refresh_cache-6e0aa58b-85e0-4e74-812f-cc01041ed6d3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.394989] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951296, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.529443} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.395278] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9e8e7b6e-1bb2-4e66-b734-2f56e31302af/9e8e7b6e-1bb2-4e66-b734-2f56e31302af.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1920.395534] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1920.395749] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9ab9907c-ae71-4668-bcfb-09b221dfe2a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.406977] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1920.406977] env[63024]: value = "task-1951298" [ 1920.406977] env[63024]: _type = "Task" [ 1920.406977] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.419018] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951298, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.429578] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951293, 'name': CloneVM_Task, 'duration_secs': 1.270766} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.429931] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Created linked-clone VM from snapshot [ 1920.430845] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e159e22a-d95c-440a-8519-0dca7f07c449 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.440830] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Uploading image 01cd4193-31a8-4abd-82db-4ca40cdf42a3 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1920.470679] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521d5020-4b36-3083-6664-446b970e510c, 'name': SearchDatastore_Task, 'duration_secs': 0.010818} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.474298] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6882164-4ba7-4071-864e-9d3384416bfa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.479621] env[63024]: DEBUG oslo_vmware.rw_handles [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1920.479621] env[63024]: value = "vm-402180" [ 1920.479621] env[63024]: _type = "VirtualMachine" [ 1920.479621] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1920.479884] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-7506db73-28cf-4da1-8788-33a4aa173d16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.482779] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1920.482779] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521d9015-d908-7cc1-ddba-55d91deb4112" [ 1920.482779] env[63024]: _type = "Task" [ 1920.482779] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.491126] env[63024]: DEBUG oslo_vmware.rw_handles [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lease: (returnval){ [ 1920.491126] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c66fa8-7def-8de4-fe2b-78151c6903e9" [ 1920.491126] env[63024]: _type = "HttpNfcLease" [ 1920.491126] env[63024]: } obtained for exporting VM: (result){ [ 1920.491126] env[63024]: value = "vm-402180" [ 1920.491126] env[63024]: _type = "VirtualMachine" [ 1920.491126] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1920.491126] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the lease: (returnval){ [ 1920.491126] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c66fa8-7def-8de4-fe2b-78151c6903e9" [ 1920.491126] env[63024]: _type = "HttpNfcLease" [ 1920.491126] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1920.496889] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521d9015-d908-7cc1-ddba-55d91deb4112, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.505422] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1920.505422] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c66fa8-7def-8de4-fe2b-78151c6903e9" [ 1920.505422] env[63024]: _type = "HttpNfcLease" [ 1920.505422] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1920.508179] env[63024]: DEBUG oslo_vmware.rw_handles [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1920.508179] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c66fa8-7def-8de4-fe2b-78151c6903e9" [ 1920.508179] env[63024]: _type = "HttpNfcLease" [ 1920.508179] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1920.509348] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc96f0f-4f09-4d95-afcf-9cfaa9550d51 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.518624] env[63024]: DEBUG oslo_vmware.rw_handles [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c55f24-570f-9575-9046-6712dd20916c/disk-0.vmdk from lease info. 
{{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1920.518820] env[63024]: DEBUG oslo_vmware.rw_handles [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c55f24-570f-9575-9046-6712dd20916c/disk-0.vmdk for reading. {{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1920.609467] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f993bd47-25a7-43a0-8fbe-516aafb8ae62 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.704533] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951288, 'name': CloneVM_Task, 'duration_secs': 2.093331} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.707127] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Created linked-clone VM from snapshot [ 1920.708214] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b50a849c-c2a4-447b-9c3d-747e03556f92 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.716403] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Uploading image cb0fc83c-6bf4-49b3-b04e-96ac6460c63c {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1920.728221] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1920.728383] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4ee9de56-c64d-4c75-b8e6-b3124cfb3e98 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.743049] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1920.743049] env[63024]: value = "task-1951300" [ 1920.743049] env[63024]: _type = "Task" [ 1920.743049] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.756087] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951300, 'name': Destroy_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.767035] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76471b6c-8b9b-4eac-b738-919f1ad69965 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.778089] env[63024]: DEBUG nova.compute.manager [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1920.778496] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1920.779456] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6603dae8-2212-4585-9e22-4162b7ea8e50 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.783061] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13ce110-d42a-4208-8483-0c2ed974dde8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.793866] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1920.819162] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a5cbf485-8df4-4828-bc0d-d2e0f0569f49 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.822575] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c857f140-be6c-41f6-b81c-b2fd28d96e06 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.834163] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba5e451-c42b-4e21-bd34-1c12bca8372e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.839559] env[63024]: DEBUG oslo_vmware.api [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1920.839559] env[63024]: value = "task-1951301" [ 1920.839559] env[63024]: _type = "Task" [ 1920.839559] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.852470] env[63024]: DEBUG nova.compute.provider_tree [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1920.860717] env[63024]: DEBUG oslo_vmware.api [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951301, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.870373] env[63024]: DEBUG nova.objects.instance [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lazy-loading 'flavor' on Instance uuid 01b8072a-4483-4932-8294-7e5b48e6b203 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1920.886394] env[63024]: DEBUG nova.compute.manager [req-aea5dfd9-7f7b-486f-890c-dd1d53541050 req-3762b4b1-e866-427a-8382-6813c79f5987 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Received event network-vif-deleted-a7a97c50-68b5-4301-99d3-7cd47c2d96d8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1920.886827] env[63024]: INFO nova.compute.manager [req-aea5dfd9-7f7b-486f-890c-dd1d53541050 req-3762b4b1-e866-427a-8382-6813c79f5987 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Neutron deleted interface a7a97c50-68b5-4301-99d3-7cd47c2d96d8; detaching it from the instance and deleting it from the info cache [ 1920.887208] env[63024]: DEBUG nova.network.neutron [req-aea5dfd9-7f7b-486f-890c-dd1d53541050 req-3762b4b1-e866-427a-8382-6813c79f5987 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.903544] env[63024]: DEBUG nova.network.neutron [-] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1920.920800] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951298, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070149} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.921090] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1920.921929] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4686356a-3b7b-46e0-b410-83135a702c28 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.949126] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 9e8e7b6e-1bb2-4e66-b734-2f56e31302af/9e8e7b6e-1bb2-4e66-b734-2f56e31302af.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1920.949566] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11cef0d4-8eab-4459-b3f7-00e9c9ea9f3e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1920.974780] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1920.974780] env[63024]: value = "task-1951302" [ 1920.974780] env[63024]: _type = "Task" [ 1920.974780] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1920.985628] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951302, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1920.994626] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521d9015-d908-7cc1-ddba-55d91deb4112, 'name': SearchDatastore_Task, 'duration_secs': 0.023272} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1920.995043] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1920.995390] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 6e0aa58b-85e0-4e74-812f-cc01041ed6d3/6e0aa58b-85e0-4e74-812f-cc01041ed6d3.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1920.996617] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f4b5a837-cd7c-4d55-ab41-abcd5a38a039 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.005704] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1921.005704] env[63024]: value = "task-1951303" [ 1921.005704] env[63024]: _type = "Task" [ 1921.005704] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.006525] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1921.012207] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8da916ef-0f0e-4abf-84a5-2b19c449f2ed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.024062] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951303, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.025912] env[63024]: DEBUG oslo_vmware.api [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1921.025912] env[63024]: value = "task-1951304" [ 1921.025912] env[63024]: _type = "Task" [ 1921.025912] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.036708] env[63024]: DEBUG oslo_vmware.api [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951304, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.243396] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1921.260130] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951300, 'name': Destroy_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.270338] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1921.270686] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1921.270864] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1921.271306] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1921.271561] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1921.271986] env[63024]: DEBUG nova.virt.hardware [None 
req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1921.272252] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1921.272448] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1921.272778] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1921.273084] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1921.273370] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1921.274781] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0568021e-e899-4d1c-b412-9ff9358ab7b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.284933] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3909f1fa-9f98-4692-a25a-9158e7861b8e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.351947] env[63024]: DEBUG oslo_vmware.api [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951301, 'name': PowerOffVM_Task, 'duration_secs': 0.347061} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.352165] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1921.352323] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1921.352818] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-264cb8f9-4a03-47a2-835c-f2d8e33fcf1b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.356194] env[63024]: DEBUG nova.scheduler.client.report [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1921.376419] env[63024]: DEBUG oslo_concurrency.lockutils [None req-926b15e0-1446-4562-84da-d49161ba98f4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "01b8072a-4483-4932-8294-7e5b48e6b203" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 9.384s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.391846] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e3651516-e515-4667-b370-121ab94e904d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.405253] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfc1fe8c-e203-499f-bccd-c4192d51cf74 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.423403] env[63024]: INFO nova.compute.manager [-] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Took 1.40 seconds to deallocate network for instance. [ 1921.478780] env[63024]: DEBUG nova.compute.manager [req-aea5dfd9-7f7b-486f-890c-dd1d53541050 req-3762b4b1-e866-427a-8382-6813c79f5987 service nova] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Detach interface failed, port_id=a7a97c50-68b5-4301-99d3-7cd47c2d96d8, reason: Instance 43cdc362-588f-42cc-a4b2-a08fe60293a5 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1921.496334] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951302, 'name': ReconfigVM_Task, 'duration_secs': 0.482201} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.496897] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 9e8e7b6e-1bb2-4e66-b734-2f56e31302af/9e8e7b6e-1bb2-4e66-b734-2f56e31302af.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1921.498828] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-df8ac608-7e7e-447c-a604-a2c00113fbd5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.517613] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1921.517613] env[63024]: value = "task-1951306" [ 1921.517613] env[63024]: _type = "Task" [ 1921.517613] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.531060] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951303, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.545134] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951306, 'name': Rename_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.554611] env[63024]: DEBUG oslo_vmware.api [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951304, 'name': PowerOnVM_Task, 'duration_secs': 0.466871} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.555228] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1921.555766] env[63024]: DEBUG nova.compute.manager [None req-7e2f8f78-2088-413e-8d55-b4bcd1bc86ca tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1921.557423] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573c66dc-3a35-4c50-9b99-07e777c51d21 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.563333] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "interface-9a7f4452-ae50-4779-8474-11d3a6d3533f-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.563657] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-9a7f4452-ae50-4779-8474-11d3a6d3533f-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.564309] env[63024]: DEBUG nova.objects.instance [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'flavor' on Instance uuid 9a7f4452-ae50-4779-8474-11d3a6d3533f {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1921.580934] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1921.581492] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1921.581869] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Deleting the datastore file [datastore1] 9267e5e4-732d-47f1-8a30-d926a1269fb9 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 
1921.583435] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75565552-bb30-4684-80a9-e3cbd28f2469 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.596378] env[63024]: DEBUG oslo_vmware.api [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for the task: (returnval){ [ 1921.596378] env[63024]: value = "task-1951307" [ 1921.596378] env[63024]: _type = "Task" [ 1921.596378] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.608911] env[63024]: DEBUG oslo_vmware.api [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951307, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.710474] env[63024]: INFO nova.compute.manager [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Rescuing [ 1921.710900] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1921.710958] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1921.711105] env[63024]: DEBUG nova.network.neutron [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1921.761365] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951300, 'name': Destroy_Task, 'duration_secs': 0.534346} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1921.761605] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Destroyed the VM [ 1921.762328] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1921.762694] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0f631096-809a-4989-84dc-fb40c9f4a7c3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.773637] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1921.773637] env[63024]: value = "task-1951308" [ 1921.773637] env[63024]: _type = "Task" [ 1921.773637] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1921.786198] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951308, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1921.863353] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.645s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.863671] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1921.866725] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 26.049s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.866725] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1921.867051] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1921.867257] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.890s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1921.867462] env[63024]: DEBUG nova.objects.instance [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lazy-loading 'resources' on Instance uuid 56d220f3-b97c-4cbe-b582-c4a4f1171472 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1921.869768] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27617c3b-7fd4-4a80-a738-73624c9b7bd2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.883371] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8338a06e-8bad-4076-bdf7-2f7afcb21c7e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.903736] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd53490-bad6-4fd9-850d-367fb145b11c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.918023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5160eb8a-696c-4c98-bb50-dc7214997733 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1921.953292] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.954150] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177601MB free_disk=168GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1921.954389] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1921.955655] env[63024]: DEBUG nova.network.neutron [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Successfully updated port: 6bc8e59c-a429-442e-a0c8-d59867ad2ad9 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1922.019971] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951303, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.693937} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.023496] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 6e0aa58b-85e0-4e74-812f-cc01041ed6d3/6e0aa58b-85e0-4e74-812f-cc01041ed6d3.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1922.023995] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1922.024067] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-06b951b3-5a93-4846-a66e-5562fc18ce1d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.036714] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951306, 'name': Rename_Task, 'duration_secs': 0.317143} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.038799] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1922.039459] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1922.039459] env[63024]: value = "task-1951309" [ 1922.039459] env[63024]: _type = "Task" [ 1922.039459] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.039784] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25ec26c9-7d73-44e9-aefa-81221b6b01a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.054673] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951309, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.056927] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1922.056927] env[63024]: value = "task-1951310" [ 1922.056927] env[63024]: _type = "Task" [ 1922.056927] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.069146] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951310, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.114272] env[63024]: DEBUG oslo_vmware.api [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Task: {'id': task-1951307, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.342572} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.116142] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1922.116511] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1922.116940] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1922.117332] env[63024]: INFO nova.compute.manager [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Took 1.34 seconds to destroy the instance on the hypervisor. [ 1922.117955] env[63024]: DEBUG oslo.service.loopingcall [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1922.122295] env[63024]: DEBUG nova.compute.manager [-] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1922.122538] env[63024]: DEBUG nova.network.neutron [-] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1922.161140] env[63024]: DEBUG nova.objects.instance [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'pci_requests' on Instance uuid 9a7f4452-ae50-4779-8474-11d3a6d3533f {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1922.285448] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951308, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.318929] env[63024]: DEBUG nova.compute.manager [req-5a736e58-0f6a-453e-98c6-5a96efcc7760 req-df550e1a-8e43-4f17-b557-f55d5091aec5 service nova] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Received event network-vif-plugged-6bc8e59c-a429-442e-a0c8-d59867ad2ad9 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1922.319416] env[63024]: DEBUG oslo_concurrency.lockutils [req-5a736e58-0f6a-453e-98c6-5a96efcc7760 req-df550e1a-8e43-4f17-b557-f55d5091aec5 service nova] Acquiring lock "669c45b0-34d6-45f8-a30e-b9b96cfd71ef-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1922.319927] env[63024]: DEBUG oslo_concurrency.lockutils [req-5a736e58-0f6a-453e-98c6-5a96efcc7760 req-df550e1a-8e43-4f17-b557-f55d5091aec5 service nova] Lock "669c45b0-34d6-45f8-a30e-b9b96cfd71ef-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1922.320121] env[63024]: DEBUG oslo_concurrency.lockutils [req-5a736e58-0f6a-453e-98c6-5a96efcc7760 req-df550e1a-8e43-4f17-b557-f55d5091aec5 service nova] Lock "669c45b0-34d6-45f8-a30e-b9b96cfd71ef-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1922.320326] env[63024]: DEBUG nova.compute.manager [req-5a736e58-0f6a-453e-98c6-5a96efcc7760 req-df550e1a-8e43-4f17-b557-f55d5091aec5 service nova] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] No waiting events found dispatching network-vif-plugged-6bc8e59c-a429-442e-a0c8-d59867ad2ad9 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1922.320500] env[63024]: WARNING nova.compute.manager [req-5a736e58-0f6a-453e-98c6-5a96efcc7760 req-df550e1a-8e43-4f17-b557-f55d5091aec5 service nova] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Received unexpected event network-vif-plugged-6bc8e59c-a429-442e-a0c8-d59867ad2ad9 for instance with vm_state building and task_state spawning. [ 1922.370473] env[63024]: DEBUG nova.compute.utils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1922.378164] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1922.378359] env[63024]: DEBUG nova.network.neutron [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1922.459437] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "refresh_cache-669c45b0-34d6-45f8-a30e-b9b96cfd71ef" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1922.459828] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired lock "refresh_cache-669c45b0-34d6-45f8-a30e-b9b96cfd71ef" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1922.460223] env[63024]: DEBUG nova.network.neutron [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1922.468530] env[63024]: DEBUG nova.policy [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8d3f9605a2384a919157a571cd164859', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a1166551532c473ca470379b16664513', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1922.484034] env[63024]: DEBUG nova.network.neutron [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updating instance_info_cache with network_info: [{"id": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "address": "fa:16:3e:9e:4f:a9", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": 
"nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d713c35-a0", "ovs_interfaceid": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1922.552512] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951309, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090366} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.559382] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1922.561822] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81cafcf6-662a-48e2-b418-c4ae8b1b9b18 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.576345] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951310, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.596614] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 6e0aa58b-85e0-4e74-812f-cc01041ed6d3/6e0aa58b-85e0-4e74-812f-cc01041ed6d3.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1922.600252] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-085adc16-c702-4fae-a5b6-545fb7644df6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.623906] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1922.623906] env[63024]: value = "task-1951311" [ 1922.623906] env[63024]: _type = "Task" [ 1922.623906] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1922.643483] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951311, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1922.663835] env[63024]: DEBUG nova.objects.base [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Object Instance<9a7f4452-ae50-4779-8474-11d3a6d3533f> lazy-loaded attributes: flavor,pci_requests {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1922.664162] env[63024]: DEBUG nova.network.neutron [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1922.710721] env[63024]: DEBUG nova.policy [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fc84a6eed984429b26a693ce7b0876e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9521048e807c4ca2a6d2e74a72b829a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1922.789890] env[63024]: DEBUG oslo_vmware.api [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951308, 'name': RemoveSnapshot_Task, 'duration_secs': 0.728081} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1922.790294] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1922.885545] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1922.957626] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10ab246d-09e1-43ea-a552-0bded7e01ce5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1922.967603] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fad9f85d-2e44-4470-a839-5e0e9d98f69b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.008664] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1923.014206] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83c0370e-ae93-43e9-a0d4-fb508bfefb85 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.021325] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e5c255b-6e28-46b1-a996-58cbb294d4ca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.038611] env[63024]: DEBUG nova.compute.provider_tree [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1923.043341] env[63024]: DEBUG nova.network.neutron [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1923.072855] env[63024]: DEBUG oslo_vmware.api [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951310, 'name': PowerOnVM_Task, 'duration_secs': 0.84378} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.073153] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1923.073423] env[63024]: INFO nova.compute.manager [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Took 9.11 seconds to spawn the instance on the hypervisor. 
[ 1923.073543] env[63024]: DEBUG nova.compute.manager [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1923.074541] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1391e469-dbc6-46c7-84ea-4634ad0aec29 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.080676] env[63024]: DEBUG nova.network.neutron [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Successfully created port: 42f23b07-4f23-454c-bdba-e075cd549205 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1923.095205] env[63024]: DEBUG nova.network.neutron [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Successfully created port: 14537c97-623f-4d93-80a9-8cd7457a0a75 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1923.139585] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951311, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.299765] env[63024]: WARNING nova.compute.manager [None req-fb3bad45-5dcb-4b97-bbc3-55c2a566277f tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Image not found during snapshot: nova.exception.ImageNotFound: Image cb0fc83c-6bf4-49b3-b04e-96ac6460c63c could not be found. 
[ 1923.396646] env[63024]: DEBUG nova.network.neutron [-] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.405534] env[63024]: DEBUG nova.network.neutron [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Updating instance_info_cache with network_info: [{"id": "6bc8e59c-a429-442e-a0c8-d59867ad2ad9", "address": "fa:16:3e:43:30:29", "network": {"id": "9e97434d-c36c-478d-a559-df5b5d8bcd77", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-630350621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1166551532c473ca470379b16664513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bc8e59c-a4", "ovs_interfaceid": "6bc8e59c-a429-442e-a0c8-d59867ad2ad9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1923.549195] env[63024]: DEBUG nova.scheduler.client.report [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1923.599326] env[63024]: DEBUG nova.compute.manager [req-6d98b6f7-a8df-4e1e-8c83-34de0f5f7c6e req-703a48c8-1ff7-442b-8d0b-25819630e24b service nova] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Received event network-vif-deleted-f4b23f8c-5413-42ca-abeb-eda669ea2fe5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1923.604386] env[63024]: INFO nova.compute.manager [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Took 32.36 seconds to build instance. [ 1923.640833] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951311, 'name': ReconfigVM_Task, 'duration_secs': 0.52763} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1923.641143] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 6e0aa58b-85e0-4e74-812f-cc01041ed6d3/6e0aa58b-85e0-4e74-812f-cc01041ed6d3.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1923.642713] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ffaa4fd5-d69c-4bde-8e30-25765cbff5a5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.654071] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1923.654071] env[63024]: value = "task-1951312" [ 1923.654071] env[63024]: _type = "Task" [ 1923.654071] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.667338] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951312, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1923.903756] env[63024]: INFO nova.compute.manager [-] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Took 1.78 seconds to deallocate network for instance. [ 1923.905059] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1923.908970] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Releasing lock "refresh_cache-669c45b0-34d6-45f8-a30e-b9b96cfd71ef" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1923.910687] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Instance network_info: |[{"id": "6bc8e59c-a429-442e-a0c8-d59867ad2ad9", "address": "fa:16:3e:43:30:29", "network": {"id": "9e97434d-c36c-478d-a559-df5b5d8bcd77", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-630350621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1166551532c473ca470379b16664513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bc8e59c-a4", "ovs_interfaceid": "6bc8e59c-a429-442e-a0c8-d59867ad2ad9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1923.912554] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:30:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4055505f-97ab-400b-969c-43d99b38fd48', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6bc8e59c-a429-442e-a0c8-d59867ad2ad9', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1923.921263] env[63024]: DEBUG oslo.service.loopingcall [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1923.922367] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1923.922367] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d1de3a5-a525-43db-aa80-992711732f61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.945684] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1923.945941] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1923.946121] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1923.946310] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1923.946944] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1923.947285] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1923.947413] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1923.947581] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1923.947755] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1923.947925] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1923.948120] env[63024]: DEBUG nova.virt.hardware [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1923.949037] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b74d67-ce7a-4f03-833b-d2aeebd33319 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.953328] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1923.953328] env[63024]: value = "task-1951313" [ 1923.953328] env[63024]: _type = "Task" [ 1923.953328] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1923.963677] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-105d7080-b681-42dd-b041-3854c8224bc6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1923.967685] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951313, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.055921] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.188s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.059571] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 26.009s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.060156] env[63024]: DEBUG nova.objects.instance [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63024) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1924.064266] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1924.064939] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f6368a4-eadd-453f-82ce-a5e5898b9786 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.075805] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1924.075805] env[63024]: value = "task-1951314" [ 1924.075805] env[63024]: _type = "Task" [ 1924.075805] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.085858] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951314, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.087261] env[63024]: INFO nova.scheduler.client.report [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted allocations for instance 56d220f3-b97c-4cbe-b582-c4a4f1171472 [ 1924.106597] env[63024]: DEBUG oslo_concurrency.lockutils [None req-305af8f4-1c86-4ad3-821d-e7674f79b1e7 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.876s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.169030] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951312, 'name': Rename_Task, 'duration_secs': 0.395407} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.169465] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1924.169877] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ec7e191-72b1-4037-b293-ddf29091db38 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.179828] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1924.179828] env[63024]: value = "task-1951315" [ 1924.179828] env[63024]: _type = "Task" [ 1924.179828] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.189345] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951315, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.423625] env[63024]: DEBUG oslo_concurrency.lockutils [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1924.465551] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951313, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.588462] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951314, 'name': PowerOffVM_Task, 'duration_secs': 0.407115} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.588728] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1924.589847] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634e81bc-e596-4f9d-a362-d71deec5884e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.595805] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73bc45f1-8438-455e-b67c-3427a6a5322f tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "56d220f3-b97c-4cbe-b582-c4a4f1171472" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.870s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.596781] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "56d220f3-b97c-4cbe-b582-c4a4f1171472" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 29.358s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1924.596962] env[63024]: INFO nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1924.597112] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "56d220f3-b97c-4cbe-b582-c4a4f1171472" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1924.623537] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28ab523f-da41-4879-a847-18d2cc83e2f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.660548] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1924.660880] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9595f544-86f0-4e73-8ef6-00eb1fc59ac0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.669982] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1924.669982] env[63024]: value = "task-1951316" [ 1924.669982] env[63024]: _type = "Task" [ 1924.669982] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.681312] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951316, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.691382] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951315, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1924.738203] env[63024]: DEBUG nova.network.neutron [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Successfully updated port: 42f23b07-4f23-454c-bdba-e075cd549205 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1924.874906] env[63024]: DEBUG nova.compute.manager [req-cfb1b25d-7b24-4aae-8eb3-18ac68548736 req-6fcd74aa-0dc2-4247-9322-ef93f1e64a03 service nova] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Received event network-changed-6bc8e59c-a429-442e-a0c8-d59867ad2ad9 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1924.876566] env[63024]: DEBUG nova.compute.manager [req-cfb1b25d-7b24-4aae-8eb3-18ac68548736 req-6fcd74aa-0dc2-4247-9322-ef93f1e64a03 service nova] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Refreshing instance network info cache due to event network-changed-6bc8e59c-a429-442e-a0c8-d59867ad2ad9. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1924.876844] env[63024]: DEBUG oslo_concurrency.lockutils [req-cfb1b25d-7b24-4aae-8eb3-18ac68548736 req-6fcd74aa-0dc2-4247-9322-ef93f1e64a03 service nova] Acquiring lock "refresh_cache-669c45b0-34d6-45f8-a30e-b9b96cfd71ef" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.877015] env[63024]: DEBUG oslo_concurrency.lockutils [req-cfb1b25d-7b24-4aae-8eb3-18ac68548736 req-6fcd74aa-0dc2-4247-9322-ef93f1e64a03 service nova] Acquired lock "refresh_cache-669c45b0-34d6-45f8-a30e-b9b96cfd71ef" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.877211] env[63024]: DEBUG nova.network.neutron [req-cfb1b25d-7b24-4aae-8eb3-18ac68548736 req-6fcd74aa-0dc2-4247-9322-ef93f1e64a03 service nova] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Refreshing network info cache for port 6bc8e59c-a429-442e-a0c8-d59867ad2ad9 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1924.968797] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951313, 'name': CreateVM_Task, 'duration_secs': 0.629859} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1924.969174] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1924.969806] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1924.969965] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1924.970320] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1924.970587] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d9a4a2cb-6261-4fad-ad46-eb02351020ad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1924.977041] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1924.977041] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e2c4a3-0f42-5216-03b3-79150c5d6a5f" [ 1924.977041] env[63024]: _type = "Task" [ 1924.977041] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1924.982818] env[63024]: DEBUG nova.network.neutron [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Successfully updated port: 14537c97-623f-4d93-80a9-8cd7457a0a75 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1924.988318] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e2c4a3-0f42-5216-03b3-79150c5d6a5f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.072608] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9c80a38b-8c13-4bc0-8f5a-91dd4a601b6a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1925.073870] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.324s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1925.075946] env[63024]: INFO nova.compute.claims [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1925.187240] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1925.187536] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1925.187799] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.187955] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.188315] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1925.188713] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ae2144d-d59f-44c4-ad1f-bbd03255eaca {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.199077] env[63024]: DEBUG oslo_vmware.api [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951315, 'name': PowerOnVM_Task, 'duration_secs': 0.82173} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.200640] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1925.200856] env[63024]: INFO nova.compute.manager [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Took 8.61 seconds to spawn the instance on the hypervisor. [ 1925.201149] env[63024]: DEBUG nova.compute.manager [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1925.201557] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1925.201775] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1925.203615] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fcb298a-e658-4641-8b5d-ce6c97f0cd9a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.206836] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d365ff6f-bb3d-4da1-9c1a-722bc57dfa44 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.214572] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1925.214572] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527f2eb4-f626-8c95-5684-87035a4a8928" [ 1925.214572] env[63024]: _type = "Task" [ 1925.214572] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.231471] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527f2eb4-f626-8c95-5684-87035a4a8928, 'name': SearchDatastore_Task, 'duration_secs': 0.012796} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.232419] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b440407-3307-4977-bb54-4af7ec0cad9d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.241258] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.241446] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.241758] env[63024]: DEBUG nova.network.neutron [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1925.244181] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1925.244181] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a0925a-4059-d46b-5d57-fb69b399aa22" [ 1925.244181] env[63024]: _type = "Task" [ 1925.244181] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.258512] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a0925a-4059-d46b-5d57-fb69b399aa22, 'name': SearchDatastore_Task, 'duration_secs': 0.012969} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.258512] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.258512] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 01b8072a-4483-4932-8294-7e5b48e6b203/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk. {{(pid=63024) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1925.258706] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f13023d-4ba5-4d2d-9128-2223f30e9fd9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.269347] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1925.269347] env[63024]: value = "task-1951317" [ 1925.269347] env[63024]: _type = "Task" [ 1925.269347] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.280678] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951317, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.494104] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "refresh_cache-96afa44e-d8c6-419c-ae69-04b7b306c2c5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.494331] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired lock "refresh_cache-96afa44e-d8c6-419c-ae69-04b7b306c2c5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.494458] env[63024]: DEBUG nova.network.neutron [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1925.495881] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e2c4a3-0f42-5216-03b3-79150c5d6a5f, 'name': SearchDatastore_Task, 'duration_secs': 0.0124} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1925.496467] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1925.496709] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1925.496975] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1925.497147] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1925.497377] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 
tempest-MultipleCreateTestJSON-1467096459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1925.497677] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef3154fd-31b2-48c7-bd4f-c745f82a6629 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.518489] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1925.518489] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1925.518874] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d89b263d-118c-4dfa-b296-5dffde71f6ec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.530056] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1925.530056] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520d1018-c4e1-913f-9972-d4c02eb063b2" [ 1925.530056] env[63024]: _type = "Task" [ 1925.530056] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1925.539669] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520d1018-c4e1-913f-9972-d4c02eb063b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.571482] env[63024]: INFO nova.compute.manager [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Rebuilding instance [ 1925.628450] env[63024]: DEBUG nova.compute.manager [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1925.629453] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517b15c0-b0ba-41db-8559-8b42796f43e1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1925.729564] env[63024]: INFO nova.compute.manager [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Took 34.24 seconds to build instance. [ 1925.788605] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951317, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1925.790749] env[63024]: DEBUG nova.network.neutron [req-cfb1b25d-7b24-4aae-8eb3-18ac68548736 req-6fcd74aa-0dc2-4247-9322-ef93f1e64a03 service nova] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Updated VIF entry in instance network info cache for port 6bc8e59c-a429-442e-a0c8-d59867ad2ad9. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1925.790749] env[63024]: DEBUG nova.network.neutron [req-cfb1b25d-7b24-4aae-8eb3-18ac68548736 req-6fcd74aa-0dc2-4247-9322-ef93f1e64a03 service nova] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Updating instance_info_cache with network_info: [{"id": "6bc8e59c-a429-442e-a0c8-d59867ad2ad9", "address": "fa:16:3e:43:30:29", "network": {"id": "9e97434d-c36c-478d-a559-df5b5d8bcd77", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-630350621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1166551532c473ca470379b16664513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6bc8e59c-a4", "ovs_interfaceid": "6bc8e59c-a429-442e-a0c8-d59867ad2ad9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1925.802757] env[63024]: WARNING nova.network.neutron [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] ffb24eaf-c6b6-414f-a69a-0c8806712ddd already exists in list: networks containing: ['ffb24eaf-c6b6-414f-a69a-0c8806712ddd']. ignoring it [ 1926.039241] env[63024]: DEBUG nova.network.neutron [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1926.046355] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520d1018-c4e1-913f-9972-d4c02eb063b2, 'name': SearchDatastore_Task, 'duration_secs': 0.074533} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.047141] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bbed7af7-45e0-4d9e-8495-628b075a9c8c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.053704] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1926.053704] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5219a3cd-27f7-bba9-bdf7-803b7edbc279" [ 1926.053704] env[63024]: _type = "Task" [ 1926.053704] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.062377] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5219a3cd-27f7-bba9-bdf7-803b7edbc279, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.108360] env[63024]: DEBUG nova.network.neutron [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Updating instance_info_cache with network_info: [{"id": "989997b7-12bd-4924-97e2-a65914c47536", "address": "fa:16:3e:3b:9f:01", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989997b7-12", "ovs_interfaceid": "989997b7-12bd-4924-97e2-a65914c47536", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "42f23b07-4f23-454c-bdba-e075cd549205", "address": "fa:16:3e:e2:e8:c5", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42f23b07-4f", "ovs_interfaceid": "42f23b07-4f23-454c-bdba-e075cd549205", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1926.229934] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2ef42504-01b3-4a87-a68a-adce67b33e8c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "6e0aa58b-85e0-4e74-812f-cc01041ed6d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.757s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.262057] env[63024]: DEBUG nova.network.neutron [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Updating instance_info_cache with network_info: [{"id": "14537c97-623f-4d93-80a9-8cd7457a0a75", "address": "fa:16:3e:35:bc:c8", "network": {"id": "9e97434d-c36c-478d-a559-df5b5d8bcd77", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-630350621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1166551532c473ca470379b16664513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14537c97-62", "ovs_interfaceid": "14537c97-623f-4d93-80a9-8cd7457a0a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1926.284132] env[63024]: DEBUG oslo_concurrency.lockutils [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "a0a9ea07-dda8-45b4-bab9-cdaf683c0a21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.284404] env[63024]: DEBUG oslo_concurrency.lockutils [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "a0a9ea07-dda8-45b4-bab9-cdaf683c0a21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.284702] env[63024]: DEBUG oslo_concurrency.lockutils [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "a0a9ea07-dda8-45b4-bab9-cdaf683c0a21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.284800] env[63024]: DEBUG oslo_concurrency.lockutils [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "a0a9ea07-dda8-45b4-bab9-cdaf683c0a21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.284952] env[63024]: DEBUG oslo_concurrency.lockutils [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "a0a9ea07-dda8-45b4-bab9-cdaf683c0a21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.286525] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951317, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.620737} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.286972] env[63024]: INFO nova.compute.manager [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Terminating instance [ 1926.291375] env[63024]: INFO nova.virt.vmwareapi.ds_util [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 01b8072a-4483-4932-8294-7e5b48e6b203/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk. 
[ 1926.294084] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cab8520-1f14-4c8f-b642-ac0ecd554898 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.296904] env[63024]: DEBUG oslo_concurrency.lockutils [req-cfb1b25d-7b24-4aae-8eb3-18ac68548736 req-6fcd74aa-0dc2-4247-9322-ef93f1e64a03 service nova] Releasing lock "refresh_cache-669c45b0-34d6-45f8-a30e-b9b96cfd71ef" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.324728] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 01b8072a-4483-4932-8294-7e5b48e6b203/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1926.327894] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58d9f6b2-350c-42c0-a33f-889cb2823548 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.348796] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1926.348796] env[63024]: value = "task-1951318" [ 1926.348796] env[63024]: _type = "Task" [ 1926.348796] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.363271] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951318, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.548494] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78401dd-068d-41f2-b6ac-dd5cfe8e4c39 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.560669] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fe6ba03-b017-4ce0-a1cc-dba496fe635a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.567594] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5219a3cd-27f7-bba9-bdf7-803b7edbc279, 'name': SearchDatastore_Task, 'duration_secs': 0.012294} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1926.568259] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.568535] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 669c45b0-34d6-45f8-a30e-b9b96cfd71ef/669c45b0-34d6-45f8-a30e-b9b96cfd71ef.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1926.568802] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c2e24df-bbdd-469c-a146-b6589ad7555e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.597189] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf109710-ff6c-447e-a0bf-4b34c1efdc61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.604606] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1926.604606] env[63024]: value = "task-1951319" [ 1926.604606] env[63024]: _type = "Task" [ 1926.604606] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.610569] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc9f5118-e22a-40aa-a627-39b8f65a8c84 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.617884] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.618642] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.618954] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.619715] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf743c92-8306-4332-893c-30af3cc06769 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.626178] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951319, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.634596] env[63024]: DEBUG nova.compute.provider_tree [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1926.648605] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1926.649345] env[63024]: DEBUG nova.virt.hardware [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1926.649604] env[63024]: DEBUG nova.virt.hardware [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1926.649762] env[63024]: DEBUG nova.virt.hardware [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1926.649948] env[63024]: DEBUG nova.virt.hardware [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1926.650111] env[63024]: DEBUG nova.virt.hardware [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1926.650263] env[63024]: DEBUG nova.virt.hardware [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1926.650474] env[63024]: DEBUG nova.virt.hardware [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1926.650635] env[63024]: DEBUG nova.virt.hardware [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1926.650804] env[63024]: DEBUG nova.virt.hardware [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1926.650972] env[63024]: DEBUG nova.virt.hardware [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1926.651174] env[63024]: DEBUG nova.virt.hardware [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1926.657525] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Reconfiguring VM to attach interface {{(pid=63024) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1926.658741] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7564c75c-efb5-4220-ace8-8b61e6895031 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.660532] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89f7edbd-e2dc-4625-ab0a-7e61f72fbc6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.682335] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1926.682335] env[63024]: value = "task-1951320" [ 1926.682335] env[63024]: _type = "Task" [ 1926.682335] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.682603] env[63024]: DEBUG oslo_vmware.api [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1926.682603] env[63024]: value = "task-1951321" [ 1926.682603] env[63024]: _type = "Task" [ 1926.682603] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.696916] env[63024]: DEBUG oslo_vmware.api [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951321, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.699667] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951320, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.724807] env[63024]: DEBUG nova.compute.manager [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Stashing vm_state: active {{(pid=63024) _prep_resize /opt/stack/nova/nova/compute/manager.py:5954}} [ 1926.768056] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Releasing lock "refresh_cache-96afa44e-d8c6-419c-ae69-04b7b306c2c5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1926.768245] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Instance network_info: |[{"id": "14537c97-623f-4d93-80a9-8cd7457a0a75", "address": "fa:16:3e:35:bc:c8", "network": {"id": "9e97434d-c36c-478d-a559-df5b5d8bcd77", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-630350621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1166551532c473ca470379b16664513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14537c97-62", "ovs_interfaceid": "14537c97-623f-4d93-80a9-8cd7457a0a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1926.768581] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:bc:c8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4055505f-97ab-400b-969c-43d99b38fd48', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '14537c97-623f-4d93-80a9-8cd7457a0a75', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1926.776627] env[63024]: DEBUG oslo.service.loopingcall [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1926.778284] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1926.778284] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bad45b89-fa70-4fa6-8fad-34a49e2167a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.797832] env[63024]: DEBUG nova.compute.manager [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1926.798087] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1926.799755] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c31e53-6cd8-409d-a45b-ee9d3aca39ed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.806170] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1926.806170] env[63024]: value = "task-1951322" [ 1926.806170] env[63024]: _type = "Task" [ 1926.806170] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.811730] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1926.812293] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71658f95-7c8d-4060-89bd-04f30aa24223 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1926.818062] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951322, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.825328] env[63024]: DEBUG oslo_vmware.api [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1926.825328] env[63024]: value = "task-1951323" [ 1926.825328] env[63024]: _type = "Task" [ 1926.825328] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1926.836483] env[63024]: DEBUG oslo_vmware.api [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951323, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1926.844270] env[63024]: DEBUG nova.compute.manager [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Received event network-vif-plugged-14537c97-623f-4d93-80a9-8cd7457a0a75 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1926.844541] env[63024]: DEBUG oslo_concurrency.lockutils [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] Acquiring lock "96afa44e-d8c6-419c-ae69-04b7b306c2c5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1926.844840] env[63024]: DEBUG oslo_concurrency.lockutils [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] Lock "96afa44e-d8c6-419c-ae69-04b7b306c2c5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1926.845074] env[63024]: DEBUG oslo_concurrency.lockutils [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] Lock "96afa44e-d8c6-419c-ae69-04b7b306c2c5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1926.846129] env[63024]: DEBUG nova.compute.manager [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] No waiting events found dispatching network-vif-plugged-14537c97-623f-4d93-80a9-8cd7457a0a75 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1926.846432] env[63024]: WARNING nova.compute.manager [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Received unexpected event network-vif-plugged-14537c97-623f-4d93-80a9-8cd7457a0a75 for instance with vm_state building and task_state spawning. [ 1926.846726] env[63024]: DEBUG nova.compute.manager [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Received event network-changed-14537c97-623f-4d93-80a9-8cd7457a0a75 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1926.846992] env[63024]: DEBUG nova.compute.manager [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Refreshing instance network info cache due to event network-changed-14537c97-623f-4d93-80a9-8cd7457a0a75. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1926.847707] env[63024]: DEBUG oslo_concurrency.lockutils [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] Acquiring lock "refresh_cache-96afa44e-d8c6-419c-ae69-04b7b306c2c5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1926.847707] env[63024]: DEBUG oslo_concurrency.lockutils [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] Acquired lock "refresh_cache-96afa44e-d8c6-419c-ae69-04b7b306c2c5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1926.847835] env[63024]: DEBUG nova.network.neutron [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Refreshing network info cache for port 14537c97-623f-4d93-80a9-8cd7457a0a75 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1926.866958] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951318, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.078222] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "9e32eb32-6eff-4875-b4a3-adfab4647023" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.078571] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "9e32eb32-6eff-4875-b4a3-adfab4647023" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.115247] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951319, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.137809] env[63024]: DEBUG nova.scheduler.client.report [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1927.197760] env[63024]: DEBUG oslo_vmware.api [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951321, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.201277] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951320, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.248209] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.316621] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951322, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.336738] env[63024]: DEBUG oslo_vmware.api [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951323, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.341013] env[63024]: DEBUG nova.compute.manager [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received event network-vif-plugged-42f23b07-4f23-454c-bdba-e075cd549205 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1927.341237] env[63024]: DEBUG oslo_concurrency.lockutils [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] Acquiring lock "9a7f4452-ae50-4779-8474-11d3a6d3533f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1927.341406] env[63024]: DEBUG oslo_concurrency.lockutils [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.341583] env[63024]: DEBUG oslo_concurrency.lockutils [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.341821] env[63024]: DEBUG nova.compute.manager [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] No waiting events found dispatching network-vif-plugged-42f23b07-4f23-454c-bdba-e075cd549205 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1927.342012] env[63024]: WARNING nova.compute.manager [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received unexpected event network-vif-plugged-42f23b07-4f23-454c-bdba-e075cd549205 for instance with vm_state active and task_state None. [ 1927.342199] env[63024]: DEBUG nova.compute.manager [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received event network-changed-42f23b07-4f23-454c-bdba-e075cd549205 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1927.342359] env[63024]: DEBUG nova.compute.manager [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Refreshing instance network info cache due to event network-changed-42f23b07-4f23-454c-bdba-e075cd549205. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1927.342584] env[63024]: DEBUG oslo_concurrency.lockutils [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] Acquiring lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1927.342729] env[63024]: DEBUG oslo_concurrency.lockutils [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] Acquired lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1927.342891] env[63024]: DEBUG nova.network.neutron [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Refreshing network info cache for port 42f23b07-4f23-454c-bdba-e075cd549205 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1927.366576] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951318, 'name': ReconfigVM_Task, 'duration_secs': 0.581727} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.366576] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 01b8072a-4483-4932-8294-7e5b48e6b203/2646ca61-612e-4bc3-97f7-ee492c048835-rescue.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1927.367160] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f78c4884-a5f7-48b8-891e-fe362ba98509 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.400079] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1b991ef5-dccb-463c-bd89-ea1c0368f8d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.419911] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1927.419911] env[63024]: value = "task-1951324" [ 1927.419911] env[63024]: _type = "Task" [ 1927.419911] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.431381] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951324, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.580829] env[63024]: DEBUG nova.compute.manager [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1927.620995] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951319, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.597419} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.622232] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 669c45b0-34d6-45f8-a30e-b9b96cfd71ef/669c45b0-34d6-45f8-a30e-b9b96cfd71ef.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1927.622453] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1927.622714] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-996a30d8-241d-4008-a0d5-f323f8bbfa6e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.633300] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1927.633300] env[63024]: value = "task-1951325" [ 1927.633300] env[63024]: _type = "Task" [ 1927.633300] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.642749] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951325, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.643665] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1927.644188] env[63024]: DEBUG nova.compute.manager [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1927.646867] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.053s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1927.647183] env[63024]: DEBUG nova.objects.instance [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63024) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1927.664702] env[63024]: DEBUG nova.network.neutron [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Updated VIF entry in instance network info cache for port 14537c97-623f-4d93-80a9-8cd7457a0a75. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1927.664808] env[63024]: DEBUG nova.network.neutron [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Updating instance_info_cache with network_info: [{"id": "14537c97-623f-4d93-80a9-8cd7457a0a75", "address": "fa:16:3e:35:bc:c8", "network": {"id": "9e97434d-c36c-478d-a559-df5b5d8bcd77", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-630350621-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a1166551532c473ca470379b16664513", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4055505f-97ab-400b-969c-43d99b38fd48", "external-id": "nsx-vlan-transportzone-952", "segmentation_id": 952, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap14537c97-62", "ovs_interfaceid": "14537c97-623f-4d93-80a9-8cd7457a0a75", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1927.702940] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951320, 'name': PowerOffVM_Task, 'duration_secs': 0.687931} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.703339] env[63024]: DEBUG oslo_vmware.api [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951321, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.703632] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1927.704024] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1927.705199] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c0d77e-358f-4137-b600-9ae510b725bb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.712974] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1927.713262] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ddd46ee-462b-49ea-aa4a-ecf244a0ba85 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.816689] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951322, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.837118] env[63024]: DEBUG oslo_vmware.api [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951323, 'name': PowerOffVM_Task, 'duration_secs': 0.544048} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1927.837441] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1927.837687] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1927.837917] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e4721135-7406-4efd-bd2e-3e3c9ffe5756 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.849429] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1927.850314] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1927.850314] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleting the datastore file [datastore1] 0f371c69-c7ae-4649-b038-be82e8ca74e1 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1927.850605] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54114871-f004-4618-9ff7-dc6e1e9a8bad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1927.862758] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1927.862758] env[63024]: value = "task-1951328" [ 1927.862758] env[63024]: _type = "Task" [ 1927.862758] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1927.871965] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951328, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1927.932057] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951324, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.074794] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1928.075120] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1928.075391] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleting the datastore file [datastore1] a0a9ea07-dda8-45b4-bab9-cdaf683c0a21 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1928.075677] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66f90ae8-6172-4c32-b70c-874d24a4d903 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.089284] env[63024]: DEBUG oslo_vmware.api [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for the task: (returnval){ [ 1928.089284] env[63024]: value = "task-1951329" [ 1928.089284] env[63024]: _type = "Task" [ 1928.089284] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.103241] env[63024]: DEBUG oslo_vmware.api [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951329, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.109997] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1928.146175] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951325, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082809} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.150033] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1928.151582] env[63024]: DEBUG nova.compute.utils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1928.157738] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ac2864-51d6-4f64-b92f-cd507e40d055 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.162601] env[63024]: DEBUG nova.compute.manager [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1928.162937] env[63024]: DEBUG nova.network.neutron [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1928.167904] env[63024]: DEBUG oslo_concurrency.lockutils [req-4fc4e7bf-75b4-4ce6-a384-b438bf0e29d8 req-59d82095-8a01-4303-ab5a-ef5ab387d9bf service nova] Releasing lock "refresh_cache-96afa44e-d8c6-419c-ae69-04b7b306c2c5" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.194108] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 669c45b0-34d6-45f8-a30e-b9b96cfd71ef/669c45b0-34d6-45f8-a30e-b9b96cfd71ef.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1928.197863] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a8c683b-b9c0-4c51-a2a7-82e83be68381 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.218868] env[63024]: DEBUG nova.policy [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f215c99539cd43039ffdb0c6cf70beaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d0715f0ccbd49ec8af8e3049d970994', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 
'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1928.222204] env[63024]: DEBUG nova.network.neutron [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Updated VIF entry in instance network info cache for port 42f23b07-4f23-454c-bdba-e075cd549205. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1928.222854] env[63024]: DEBUG nova.network.neutron [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Updating instance_info_cache with network_info: [{"id": "989997b7-12bd-4924-97e2-a65914c47536", "address": "fa:16:3e:3b:9f:01", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989997b7-12", "ovs_interfaceid": "989997b7-12bd-4924-97e2-a65914c47536", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "42f23b07-4f23-454c-bdba-e075cd549205", "address": "fa:16:3e:e2:e8:c5", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42f23b07-4f", "ovs_interfaceid": "42f23b07-4f23-454c-bdba-e075cd549205", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1928.234853] env[63024]: DEBUG oslo_vmware.api [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951321, 'name': ReconfigVM_Task, 'duration_secs': 
1.503434} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.236702] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.236983] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Reconfigured VM to attach interface {{(pid=63024) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1928.239707] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1928.239707] env[63024]: value = "task-1951330" [ 1928.239707] env[63024]: _type = "Task" [ 1928.239707] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.250894] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951330, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.320137] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951322, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.375803] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951328, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293275} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.376162] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1928.376368] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1928.376560] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1928.439608] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951324, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.567668] env[63024]: DEBUG nova.network.neutron [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Successfully created port: 95e5d41a-5998-4d48-9aec-6255c74c448f {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1928.607532] env[63024]: DEBUG oslo_vmware.api [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Task: {'id': task-1951329, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251838} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.608039] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1928.608410] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1928.608722] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1928.609052] env[63024]: INFO nova.compute.manager [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Took 1.81 seconds to destroy the instance on the hypervisor. [ 1928.609505] env[63024]: DEBUG oslo.service.loopingcall [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1928.609826] env[63024]: DEBUG nova.compute.manager [-] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1928.609986] env[63024]: DEBUG nova.network.neutron [-] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1928.663579] env[63024]: DEBUG nova.compute.manager [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1928.667758] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ddd13f66-e4e7-4de5-8aa7-310ff216f90a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.021s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.669326] env[63024]: DEBUG oslo_concurrency.lockutils [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.750s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1928.669662] env[63024]: DEBUG nova.objects.instance [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lazy-loading 'resources' on Instance uuid ac60546a-37b2-4d2a-8505-61fe202e2ed0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1928.726488] env[63024]: DEBUG oslo_concurrency.lockutils [req-915c9c37-e9a8-4dad-bf5b-7d1586c1f00e req-63a3627e-cc83-4cf9-9067-cacc96eeaf78 service nova] Releasing lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1928.742549] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f2a8d614-8a43-4c00-9803-b4fcff4618da tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-9a7f4452-ae50-4779-8474-11d3a6d3533f-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.179s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1928.754788] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951330, 'name': ReconfigVM_Task, 'duration_secs': 0.49933} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.755092] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 669c45b0-34d6-45f8-a30e-b9b96cfd71ef/669c45b0-34d6-45f8-a30e-b9b96cfd71ef.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1928.756708] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-db7c5749-203e-4d28-b60f-57b3af9beda7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.764349] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1928.764349] env[63024]: value = "task-1951331" [ 1928.764349] env[63024]: _type = "Task" [ 1928.764349] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.777057] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951331, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.824842] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951322, 'name': CreateVM_Task, 'duration_secs': 1.674103} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.825264] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1928.826343] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1928.828020] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1928.828020] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1928.828020] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-35d9149a-3f58-467f-bc22-e82326850f49 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.837520] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1928.837520] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52de865d-7c28-a8a6-a7bc-19e9a4859437" [ 1928.837520] env[63024]: _type = "Task" [ 1928.837520] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.848621] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52de865d-7c28-a8a6-a7bc-19e9a4859437, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1928.934686] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951324, 'name': ReconfigVM_Task, 'duration_secs': 1.218057} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1928.934895] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1928.936057] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0ab61f3c-15ba-4afd-9107-67d0f02005e6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1928.943739] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1928.943739] env[63024]: value = "task-1951332" [ 1928.943739] env[63024]: _type = "Task" [ 1928.943739] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1928.954360] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951332, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.278893] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951331, 'name': Rename_Task, 'duration_secs': 0.323956} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.281873] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1929.282533] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-02829149-c245-4ee7-b924-d174ca8b9eea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.291975] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1929.291975] env[63024]: value = "task-1951333" [ 1929.291975] env[63024]: _type = "Task" [ 1929.291975] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.303651] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951333, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.346039] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52de865d-7c28-a8a6-a7bc-19e9a4859437, 'name': SearchDatastore_Task, 'duration_secs': 0.014242} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.349096] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1929.349361] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1929.349621] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1929.349883] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1929.350406] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1929.351521] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dbba793e-d8bc-4ae7-805f-60700ec58f02 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.361848] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1929.362118] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1929.365873] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96398ef7-eb12-43d3-8a9f-a7603acf53f4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.373024] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1929.373024] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bf97a0-41b2-363d-5cc4-41042b2a8e0f" [ 1929.373024] env[63024]: _type = "Task" [ 1929.373024] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.381619] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bf97a0-41b2-363d-5cc4-41042b2a8e0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.411104] env[63024]: DEBUG nova.virt.hardware [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1929.411383] env[63024]: DEBUG nova.virt.hardware [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1929.411610] env[63024]: DEBUG nova.virt.hardware [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1929.411760] env[63024]: DEBUG nova.virt.hardware [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1929.411934] env[63024]: DEBUG nova.virt.hardware [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image pref 0:0:0 {{(pid=63024) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1929.412100] env[63024]: DEBUG nova.virt.hardware [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1929.412769] env[63024]: DEBUG nova.virt.hardware [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1929.412769] env[63024]: DEBUG nova.virt.hardware [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1929.412769] env[63024]: DEBUG nova.virt.hardware [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1929.412993] env[63024]: DEBUG nova.virt.hardware [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1929.413538] env[63024]: DEBUG nova.virt.hardware [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1929.416164] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54f3842b-5481-45b6-8bae-591dc521ca53 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.430056] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1110f416-237e-4ff7-b003-3f70021f15d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.450435] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:20:a6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3ccbdbb-8b49-4a26-913f-2a448b72280f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1929.459473] env[63024]: DEBUG oslo.service.loopingcall [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 
tempest-ServerActionsTestJSON-64550346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1929.466560] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1929.467144] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bc29b283-0a03-46df-998c-6c06fd1d6add {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.486389] env[63024]: DEBUG nova.network.neutron [-] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1929.494777] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951332, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.497040] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1929.497040] env[63024]: value = "task-1951334" [ 1929.497040] env[63024]: _type = "Task" [ 1929.497040] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.506764] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951334, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.645773] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5adf87f-c600-472c-b9a0-4a682c9ce19c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.655035] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee301ab-3126-4780-83a4-fb6fcc40a257 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.693038] env[63024]: DEBUG nova.compute.manager [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1929.696751] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11fca18f-11f3-4f92-b402-747d76e5e079 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.704825] env[63024]: DEBUG nova.compute.manager [req-d085125a-c164-4e7d-83c7-494fc6a35e54 req-77713309-aed7-4373-b3d2-9d4be3a86a30 service nova] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Received event network-vif-deleted-6e038615-4146-41f3-9011-c4aaf6ffe845 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1929.711576] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f35acdd-ead1-4067-b3f9-e564f0bbf1fe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.729465] env[63024]: DEBUG nova.compute.provider_tree [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1929.733190] env[63024]: DEBUG nova.virt.hardware [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1929.733437] env[63024]: DEBUG nova.virt.hardware [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1929.733595] env[63024]: DEBUG nova.virt.hardware [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1929.733774] env[63024]: DEBUG nova.virt.hardware [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1929.734099] env[63024]: DEBUG nova.virt.hardware [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 
tempest-AttachVolumeNegativeTest-1742414548-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1929.734099] env[63024]: DEBUG nova.virt.hardware [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1929.734277] env[63024]: DEBUG nova.virt.hardware [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1929.734437] env[63024]: DEBUG nova.virt.hardware [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1929.734606] env[63024]: DEBUG nova.virt.hardware [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1929.734768] env[63024]: DEBUG nova.virt.hardware [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1929.734945] env[63024]: DEBUG nova.virt.hardware [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1929.735805] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5036aa3f-d2b0-42ae-a772-fbc8856d66c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.746213] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d77411-4126-4404-9ded-33d4900d6d85 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.769675] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1929.769675] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock 
"c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1929.805954] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951333, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.884343] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52bf97a0-41b2-363d-5cc4-41042b2a8e0f, 'name': SearchDatastore_Task, 'duration_secs': 0.024263} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.885365] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e72bba6f-07ec-4273-8954-b610a302a430 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.891340] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1929.891340] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ff64d6-16fd-5ec0-fbde-6d96a857d0f5" [ 1929.891340] env[63024]: _type = "Task" [ 1929.891340] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1929.902254] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ff64d6-16fd-5ec0-fbde-6d96a857d0f5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1929.960793] env[63024]: DEBUG oslo_vmware.api [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951332, 'name': PowerOnVM_Task, 'duration_secs': 0.907432} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1929.961102] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1929.963975] env[63024]: DEBUG nova.compute.manager [None req-1366f41c-1bce-4d0e-8865-b412544a6932 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1929.964780] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51072c50-03dc-4db5-ba5c-8ae81426b364 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1929.987833] env[63024]: INFO nova.compute.manager [-] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Took 1.38 seconds to deallocate network for instance. [ 1930.008270] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951334, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.132146] env[63024]: DEBUG nova.network.neutron [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Successfully updated port: 95e5d41a-5998-4d48-9aec-6255c74c448f {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1930.240684] env[63024]: DEBUG nova.scheduler.client.report [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1930.273198] env[63024]: DEBUG nova.compute.utils [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1930.306324] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951333, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.403102] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ff64d6-16fd-5ec0-fbde-6d96a857d0f5, 'name': SearchDatastore_Task, 'duration_secs': 0.011931} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.405055] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1930.405055] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 96afa44e-d8c6-419c-ae69-04b7b306c2c5/96afa44e-d8c6-419c-ae69-04b7b306c2c5.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1930.405055] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81601f3f-e1fc-41de-88fb-1a4c96249e45 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.414893] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1930.414893] env[63024]: value = "task-1951335" [ 1930.414893] env[63024]: _type = "Task" [ 1930.414893] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1930.427597] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951335, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.494341] env[63024]: DEBUG oslo_concurrency.lockutils [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1930.510552] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951334, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1930.636760] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "refresh_cache-ea24d375-ba88-42ca-a07e-52000ec613c0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1930.636760] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired lock "refresh_cache-ea24d375-ba88-42ca-a07e-52000ec613c0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1930.636760] env[63024]: DEBUG nova.network.neutron [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1930.651944] env[63024]: DEBUG nova.compute.manager [req-d46242b3-825c-40d7-880f-b05ee32b92b6 req-425525aa-5353-483f-b40c-7650fb2c1e1f service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Received event network-vif-plugged-95e5d41a-5998-4d48-9aec-6255c74c448f {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1930.652235] env[63024]: DEBUG oslo_concurrency.lockutils [req-d46242b3-825c-40d7-880f-b05ee32b92b6 req-425525aa-5353-483f-b40c-7650fb2c1e1f service nova] Acquiring lock "ea24d375-ba88-42ca-a07e-52000ec613c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1930.652508] env[63024]: DEBUG oslo_concurrency.lockutils [req-d46242b3-825c-40d7-880f-b05ee32b92b6 req-425525aa-5353-483f-b40c-7650fb2c1e1f service nova] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.656711] env[63024]: DEBUG oslo_concurrency.lockutils [req-d46242b3-825c-40d7-880f-b05ee32b92b6 req-425525aa-5353-483f-b40c-7650fb2c1e1f service nova] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.656711] env[63024]: DEBUG nova.compute.manager [req-d46242b3-825c-40d7-880f-b05ee32b92b6 req-425525aa-5353-483f-b40c-7650fb2c1e1f service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] No waiting events found dispatching network-vif-plugged-95e5d41a-5998-4d48-9aec-6255c74c448f {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1930.656711] env[63024]: WARNING nova.compute.manager [req-d46242b3-825c-40d7-880f-b05ee32b92b6 req-425525aa-5353-483f-b40c-7650fb2c1e1f service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Received unexpected event network-vif-plugged-95e5d41a-5998-4d48-9aec-6255c74c448f for instance with vm_state building and task_state spawning. 
[ 1930.748870] env[63024]: DEBUG oslo_concurrency.lockutils [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.079s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.752906] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.492s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1930.753190] env[63024]: DEBUG nova.objects.instance [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lazy-loading 'resources' on Instance uuid 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1930.776960] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1930.779179] env[63024]: INFO nova.scheduler.client.report [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Deleted allocations for instance ac60546a-37b2-4d2a-8505-61fe202e2ed0 [ 1930.808976] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951333, 'name': PowerOnVM_Task, 'duration_secs': 1.239389} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1930.809649] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1930.809803] env[63024]: INFO nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Took 9.57 seconds to spawn the instance on the hypervisor. 
[ 1930.809952] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1930.810817] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ce8b30e-b579-44aa-b53f-97faf4fb7e70 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1930.926949] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951335, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.011277] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951334, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.177517] env[63024]: DEBUG nova.network.neutron [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1931.289501] env[63024]: DEBUG oslo_concurrency.lockutils [None req-237bc6e2-b9d3-4eab-86ae-858f8d0a2314 tempest-ListServerFiltersTestJSON-1366755677 tempest-ListServerFiltersTestJSON-1366755677-project-member] Lock "ac60546a-37b2-4d2a-8505-61fe202e2ed0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.033s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.335385] env[63024]: INFO nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Took 35.86 seconds to build instance. 
[ 1931.342147] env[63024]: DEBUG nova.network.neutron [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Updating instance_info_cache with network_info: [{"id": "95e5d41a-5998-4d48-9aec-6255c74c448f", "address": "fa:16:3e:c2:99:b7", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95e5d41a-59", "ovs_interfaceid": "95e5d41a-5998-4d48-9aec-6255c74c448f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1931.431794] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951335, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.683375} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.434295] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 96afa44e-d8c6-419c-ae69-04b7b306c2c5/96afa44e-d8c6-419c-ae69-04b7b306c2c5.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1931.434295] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1931.434295] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9107c0fe-83b9-48de-bd7e-1588ebb6cd40 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.449811] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1931.449811] env[63024]: value = "task-1951336" [ 1931.449811] env[63024]: _type = "Task" [ 1931.449811] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.462087] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951336, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.510547] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951334, 'name': CreateVM_Task, 'duration_secs': 1.837034} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.510720] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1931.514282] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1931.514282] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1931.514565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1931.515475] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d85ea7fd-2c83-48c9-ae5a-7cb6e87605c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.522317] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1931.522317] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dccab2-8424-8292-189c-21770fcb6ede" [ 1931.522317] env[63024]: _type = "Task" [ 1931.522317] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.535563] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dccab2-8424-8292-189c-21770fcb6ede, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.729459] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c9fa95-a50d-4cb9-92dd-7fad38915b71 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.739099] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb8bcbd8-0b7d-4b95-bb25-35853c4be463 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.776698] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a484bb-dca2-466b-896b-c6a17d7617a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.786281] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55e1076c-16a8-4151-8a47-d9746f8195be {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.801255] env[63024]: DEBUG nova.compute.provider_tree [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1931.836586] env[63024]: DEBUG oslo_concurrency.lockutils [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "interface-9a7f4452-ae50-4779-8474-11d3a6d3533f-16467e49-8cf6-465f-98d5-471892d9f322" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.836819] env[63024]: DEBUG oslo_concurrency.lockutils [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-9a7f4452-ae50-4779-8474-11d3a6d3533f-16467e49-8cf6-465f-98d5-471892d9f322" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.837248] env[63024]: DEBUG nova.objects.instance [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'flavor' on Instance uuid 9a7f4452-ae50-4779-8474-11d3a6d3533f {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1931.838705] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "669c45b0-34d6-45f8-a30e-b9b96cfd71ef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.369s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1931.847573] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 
tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Releasing lock "refresh_cache-ea24d375-ba88-42ca-a07e-52000ec613c0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1931.847882] env[63024]: DEBUG nova.compute.manager [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Instance network_info: |[{"id": "95e5d41a-5998-4d48-9aec-6255c74c448f", "address": "fa:16:3e:c2:99:b7", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95e5d41a-59", "ovs_interfaceid": "95e5d41a-5998-4d48-9aec-6255c74c448f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1931.848376] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:99:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98011432-48cc-4ffd-a5a8-b96d2ea4424a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '95e5d41a-5998-4d48-9aec-6255c74c448f', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1931.856312] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Creating folder: Project (0d0715f0ccbd49ec8af8e3049d970994). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1931.857298] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-351387f8-91ec-4528-901b-8fd244a4b1b7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.871832] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Created folder: Project (0d0715f0ccbd49ec8af8e3049d970994) in parent group-v401959. 
[ 1931.872271] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Creating folder: Instances. Parent ref: group-v402184. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1931.872375] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8903d46a-e351-4d10-adc1-ab08946f497e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.880613] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1931.882279] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1931.882279] env[63024]: INFO nova.compute.manager [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Attaching volume 2cfc46b0-10e9-4f4c-8f58-7fff36954695 to /dev/sdb [ 1931.889284] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Created folder: Instances in parent group-v402184. [ 1931.890156] env[63024]: DEBUG oslo.service.loopingcall [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1931.893143] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1931.894221] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-81e4aebe-1b36-4c62-85c3-ffbe1fd058d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.921777] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1931.921777] env[63024]: value = "task-1951339" [ 1931.921777] env[63024]: _type = "Task" [ 1931.921777] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1931.933378] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951339, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1931.937541] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe0a1ea-3660-4c06-9968-48732d2e7f62 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.945419] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f920ca0a-fcbe-466e-a1ba-c238195f7046 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.957026] env[63024]: DEBUG oslo_vmware.rw_handles [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c55f24-570f-9575-9046-6712dd20916c/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1931.957880] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d1a569-a9fc-46ae-97d1-bbd7fd5aeae0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.969131] env[63024]: DEBUG nova.virt.block_device [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Updating existing volume attachment record: 9c668fae-1400-4d92-b61e-77f75123aff9 {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1931.971365] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951336, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083396} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1931.975157] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1931.975157] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df90e7e4-7dc3-4ef7-9e75-6acd6a227923 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.978085] env[63024]: DEBUG oslo_vmware.rw_handles [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c55f24-570f-9575-9046-6712dd20916c/disk-0.vmdk is in state: ready. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1931.978085] env[63024]: ERROR oslo_vmware.rw_handles [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c55f24-570f-9575-9046-6712dd20916c/disk-0.vmdk due to incomplete transfer. [ 1931.978598] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5b869d92-146f-4d6b-8716-d8040df26f0f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1931.999639] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] 96afa44e-d8c6-419c-ae69-04b7b306c2c5/96afa44e-d8c6-419c-ae69-04b7b306c2c5.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1932.001170] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ccaaba2f-44bf-4d71-90a2-575ae9cb4902 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.018865] env[63024]: DEBUG oslo_vmware.rw_handles [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52c55f24-570f-9575-9046-6712dd20916c/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1932.019249] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Uploaded image 01cd4193-31a8-4abd-82db-4ca40cdf42a3 to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1932.021467] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1932.021748] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c3ddbd86-e033-4629-98c1-34b723dbf621 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.030671] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1932.030671] env[63024]: value = "task-1951340" [ 1932.030671] env[63024]: _type = "Task" [ 1932.030671] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.039311] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1932.039311] env[63024]: value = "task-1951341" [ 1932.039311] env[63024]: _type = "Task" [ 1932.039311] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.040080] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dccab2-8424-8292-189c-21770fcb6ede, 'name': SearchDatastore_Task, 'duration_secs': 0.031047} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.040829] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1932.041107] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1932.041783] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1932.041783] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.041783] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1932.045040] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e8325a59-71b6-4de7-840e-48d562d5993b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.050721] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951340, 'name': 
ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.059100] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951341, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.063942] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1932.064366] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1932.065250] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb93495d-d559-4ad1-8ca3-8d29b73ba6c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.072319] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1932.072319] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527dea40-3f1e-2c30-2818-60ab51c2d31e" [ 1932.072319] env[63024]: _type = "Task" [ 1932.072319] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.082392] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527dea40-3f1e-2c30-2818-60ab51c2d31e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.305141] env[63024]: DEBUG nova.scheduler.client.report [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1932.389203] env[63024]: INFO nova.compute.manager [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Unrescuing [ 1932.389626] env[63024]: DEBUG oslo_concurrency.lockutils [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1932.389804] env[63024]: DEBUG oslo_concurrency.lockutils [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquired lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1932.390068] env[63024]: DEBUG nova.network.neutron [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1932.439020] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951339, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.459017] env[63024]: DEBUG nova.objects.instance [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'pci_requests' on Instance uuid 9a7f4452-ae50-4779-8474-11d3a6d3533f {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1932.542233] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951340, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.551448] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951341, 'name': Destroy_Task} progress is 33%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.582909] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527dea40-3f1e-2c30-2818-60ab51c2d31e, 'name': SearchDatastore_Task, 'duration_secs': 0.019931} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1932.584114] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5520f62b-5f89-4716-88c9-49713de7972d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1932.591755] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1932.591755] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527e35ac-8ab4-efcd-7ced-bbb641fdd304" [ 1932.591755] env[63024]: _type = "Task" [ 1932.591755] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1932.601084] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527e35ac-8ab4-efcd-7ced-bbb641fdd304, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.810344] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.057s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1932.812900] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.860s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1932.813163] env[63024]: DEBUG nova.objects.instance [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lazy-loading 'resources' on Instance uuid 43cdc362-588f-42cc-a4b2-a08fe60293a5 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1932.834968] env[63024]: INFO nova.scheduler.client.report [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleted allocations for instance 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc [ 1932.936655] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951339, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1932.962802] env[63024]: DEBUG nova.objects.base [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Object Instance<9a7f4452-ae50-4779-8474-11d3a6d3533f> lazy-loaded attributes: flavor,pci_requests {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1932.963217] env[63024]: DEBUG nova.network.neutron [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1933.043269] env[63024]: DEBUG nova.policy [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fc84a6eed984429b26a693ce7b0876e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9521048e807c4ca2a6d2e74a72b829a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1933.060274] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951340, 'name': ReconfigVM_Task, 'duration_secs': 1.013329} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.064211] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Reconfigured VM instance instance-00000054 to attach disk [datastore1] 96afa44e-d8c6-419c-ae69-04b7b306c2c5/96afa44e-d8c6-419c-ae69-04b7b306c2c5.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1933.066637] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c1f0e57-f0d7-4b57-9799-7759cc2aeac6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.074319] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951341, 'name': Destroy_Task, 'duration_secs': 0.679097} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.079024] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Destroyed the VM [ 1933.079024] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1933.079024] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-9aa7d73b-b020-45cb-8ac5-bd97b506d0a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.080722] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1933.080722] env[63024]: value = "task-1951345" [ 1933.080722] env[63024]: _type = "Task" [ 1933.080722] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.088309] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1933.088309] env[63024]: value = "task-1951346" [ 1933.088309] env[63024]: _type = "Task" [ 1933.088309] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.097104] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951345, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.110990] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951346, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.113484] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527e35ac-8ab4-efcd-7ced-bbb641fdd304, 'name': SearchDatastore_Task, 'duration_secs': 0.017281} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.113943] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.114408] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 0f371c69-c7ae-4649-b038-be82e8ca74e1/0f371c69-c7ae-4649-b038-be82e8ca74e1.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1933.114759] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-36b1900f-8424-4ec2-b1cf-fad4d5680078 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.124066] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1933.124066] env[63024]: value = "task-1951347" [ 1933.124066] env[63024]: _type = "Task" [ 1933.124066] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.136575] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951347, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.349164] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e9670a20-73a3-40df-8c25-6a89af387d93 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "9ca6342c-55bd-4c78-9fa6-3caf4ec744bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 23.159s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1933.439047] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951339, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.448558] env[63024]: DEBUG nova.network.neutron [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updating instance_info_cache with network_info: [{"id": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "address": "fa:16:3e:9e:4f:a9", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d713c35-a0", "ovs_interfaceid": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1933.452811] env[63024]: DEBUG nova.compute.manager [req-e845024c-b359-4490-aa05-065885c31c33 req-98d86bd5-ca4f-43a5-bd67-edc145b6e9fe service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Received event network-changed-95e5d41a-5998-4d48-9aec-6255c74c448f {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1933.453178] env[63024]: DEBUG nova.compute.manager [req-e845024c-b359-4490-aa05-065885c31c33 req-98d86bd5-ca4f-43a5-bd67-edc145b6e9fe service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Refreshing instance network info cache due to event network-changed-95e5d41a-5998-4d48-9aec-6255c74c448f. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1933.453312] env[63024]: DEBUG oslo_concurrency.lockutils [req-e845024c-b359-4490-aa05-065885c31c33 req-98d86bd5-ca4f-43a5-bd67-edc145b6e9fe service nova] Acquiring lock "refresh_cache-ea24d375-ba88-42ca-a07e-52000ec613c0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1933.453724] env[63024]: DEBUG oslo_concurrency.lockutils [req-e845024c-b359-4490-aa05-065885c31c33 req-98d86bd5-ca4f-43a5-bd67-edc145b6e9fe service nova] Acquired lock "refresh_cache-ea24d375-ba88-42ca-a07e-52000ec613c0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1933.453724] env[63024]: DEBUG nova.network.neutron [req-e845024c-b359-4490-aa05-065885c31c33 req-98d86bd5-ca4f-43a5-bd67-edc145b6e9fe service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Refreshing network info cache for port 95e5d41a-5998-4d48-9aec-6255c74c448f {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1933.603427] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951345, 'name': Rename_Task, 'duration_secs': 0.198756} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1933.604140] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1933.607795] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-273e61ac-0631-44c6-8446-006c952820b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.613704] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951346, 'name': RemoveSnapshot_Task} progress is 17%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.620228] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1933.620228] env[63024]: value = "task-1951348" [ 1933.620228] env[63024]: _type = "Task" [ 1933.620228] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1933.650515] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951348, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.658755] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951347, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.877621] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05a8596-c0a4-4cff-9ea8-dbaff102e810 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.887904] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-951d30f2-bb70-4c67-b3e5-692d0d7ed8b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.922162] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bb0c62-4648-4171-afa5-898976230892 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.932167] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e682a72c-b61c-4b5f-b56e-f0d857864d6f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1933.940714] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951339, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1933.951790] env[63024]: DEBUG nova.compute.provider_tree [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1933.955875] env[63024]: DEBUG oslo_concurrency.lockutils [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Releasing lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1933.957674] env[63024]: DEBUG nova.objects.instance [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lazy-loading 'flavor' on Instance uuid 01b8072a-4483-4932-8294-7e5b48e6b203 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1934.105420] env[63024]: DEBUG oslo_vmware.api [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951346, 'name': RemoveSnapshot_Task, 'duration_secs': 0.984562} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.105855] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1934.105925] env[63024]: INFO nova.compute.manager [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Took 17.33 seconds to snapshot the instance on the hypervisor. [ 1934.130930] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951348, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.145969] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951347, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.572147} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.146266] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 0f371c69-c7ae-4649-b038-be82e8ca74e1/0f371c69-c7ae-4649-b038-be82e8ca74e1.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1934.146474] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1934.146742] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e040f206-db63-4292-bb6d-0cfd37139833 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.157168] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1934.157168] env[63024]: value = "task-1951349" [ 1934.157168] env[63024]: _type = "Task" [ 1934.157168] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.166684] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951349, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.306464] env[63024]: DEBUG nova.network.neutron [req-e845024c-b359-4490-aa05-065885c31c33 req-98d86bd5-ca4f-43a5-bd67-edc145b6e9fe service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Updated VIF entry in instance network info cache for port 95e5d41a-5998-4d48-9aec-6255c74c448f. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1934.306729] env[63024]: DEBUG nova.network.neutron [req-e845024c-b359-4490-aa05-065885c31c33 req-98d86bd5-ca4f-43a5-bd67-edc145b6e9fe service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Updating instance_info_cache with network_info: [{"id": "95e5d41a-5998-4d48-9aec-6255c74c448f", "address": "fa:16:3e:c2:99:b7", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95e5d41a-59", "ovs_interfaceid": "95e5d41a-5998-4d48-9aec-6255c74c448f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1934.343026] env[63024]: DEBUG nova.compute.manager [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1934.343875] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9444629-dff8-4a85-8fba-f19e8cffcef8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.436308] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951339, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.456770] env[63024]: DEBUG nova.scheduler.client.report [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1934.464754] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b68695c-776f-4219-8ea0-86a8e7a3e188 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.494777] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1934.495963] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b3466a98-9236-4264-a3cb-06106d12b7cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.507111] env[63024]: DEBUG oslo_vmware.api [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1934.507111] env[63024]: value = "task-1951350" [ 1934.507111] env[63024]: _type = "Task" [ 1934.507111] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.519052] env[63024]: DEBUG oslo_vmware.api [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951350, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.632820] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951348, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.669570] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951349, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077872} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1934.669859] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1934.670683] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b942ef47-1645-4f08-bd1d-0e38ee11123e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.681676] env[63024]: DEBUG nova.compute.manager [None req-758ca82e-1e5b-4167-8551-43564c654821 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Found 2 images (rotation: 2) {{(pid=63024) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4884}} [ 1934.711227] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 0f371c69-c7ae-4649-b038-be82e8ca74e1/0f371c69-c7ae-4649-b038-be82e8ca74e1.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1934.713621] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6b4973e-4821-4641-a93c-3ec797b0ac6e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.742553] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1934.742553] env[63024]: value = "task-1951351" [ 1934.742553] env[63024]: _type = "Task" [ 1934.742553] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1934.756933] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951351, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.790146] env[63024]: DEBUG nova.network.neutron [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Successfully updated port: 16467e49-8cf6-465f-98d5-471892d9f322 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1934.809314] env[63024]: DEBUG oslo_concurrency.lockutils [req-e845024c-b359-4490-aa05-065885c31c33 req-98d86bd5-ca4f-43a5-bd67-edc145b6e9fe service nova] Releasing lock "refresh_cache-ea24d375-ba88-42ca-a07e-52000ec613c0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1934.864222] env[63024]: INFO nova.compute.manager [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] instance snapshotting [ 1934.869373] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b4109c-aec9-4ac9-9c9a-7ab132cce297 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.896999] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-368e290f-56ac-4c47-8a5d-0c44edd2d2b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1934.940462] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951339, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1934.964227] env[63024]: DEBUG nova.compute.manager [req-542ccf0e-2ee0-4571-a7c8-c8c3c30b94a7 req-f6d02d24-d35d-4270-a27c-63ff743472e3 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received event network-vif-plugged-16467e49-8cf6-465f-98d5-471892d9f322 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1934.964227] env[63024]: DEBUG oslo_concurrency.lockutils [req-542ccf0e-2ee0-4571-a7c8-c8c3c30b94a7 req-f6d02d24-d35d-4270-a27c-63ff743472e3 service nova] Acquiring lock "9a7f4452-ae50-4779-8474-11d3a6d3533f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1934.964227] env[63024]: DEBUG oslo_concurrency.lockutils [req-542ccf0e-2ee0-4571-a7c8-c8c3c30b94a7 req-f6d02d24-d35d-4270-a27c-63ff743472e3 service nova] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1934.964227] env[63024]: DEBUG oslo_concurrency.lockutils [req-542ccf0e-2ee0-4571-a7c8-c8c3c30b94a7 req-f6d02d24-d35d-4270-a27c-63ff743472e3 service nova] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.964392] env[63024]: DEBUG nova.compute.manager [req-542ccf0e-2ee0-4571-a7c8-c8c3c30b94a7 
req-f6d02d24-d35d-4270-a27c-63ff743472e3 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] No waiting events found dispatching network-vif-plugged-16467e49-8cf6-465f-98d5-471892d9f322 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1934.964553] env[63024]: WARNING nova.compute.manager [req-542ccf0e-2ee0-4571-a7c8-c8c3c30b94a7 req-f6d02d24-d35d-4270-a27c-63ff743472e3 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received unexpected event network-vif-plugged-16467e49-8cf6-465f-98d5-471892d9f322 for instance with vm_state active and task_state None. [ 1934.965522] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.153s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1934.967811] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 13.013s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.007371] env[63024]: INFO nova.scheduler.client.report [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Deleted allocations for instance 43cdc362-588f-42cc-a4b2-a08fe60293a5 [ 1935.021348] env[63024]: DEBUG oslo_vmware.api [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951350, 'name': PowerOffVM_Task, 'duration_secs': 0.29438} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.021348] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1935.025873] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Reconfiguring VM instance instance-00000048 to detach disk 2002 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1935.026591] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9301d255-b055-4b99-a31c-a981062abc7e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.047690] env[63024]: DEBUG oslo_vmware.api [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1935.047690] env[63024]: value = "task-1951352" [ 1935.047690] env[63024]: _type = "Task" [ 1935.047690] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.057093] env[63024]: DEBUG oslo_vmware.api [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951352, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.132346] env[63024]: DEBUG oslo_vmware.api [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951348, 'name': PowerOnVM_Task, 'duration_secs': 1.411697} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.132618] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1935.132830] env[63024]: INFO nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Took 11.23 seconds to spawn the instance on the hypervisor. [ 1935.133067] env[63024]: DEBUG nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1935.133873] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9b027b-4257-432b-8f35-070a57b7702d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.255276] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951351, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.294141] env[63024]: DEBUG oslo_concurrency.lockutils [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1935.294365] env[63024]: DEBUG oslo_concurrency.lockutils [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.295500] env[63024]: DEBUG nova.network.neutron [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1935.377654] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.377950] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.378197] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1935.378418] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1935.378619] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1935.380917] env[63024]: INFO nova.compute.manager [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Terminating instance [ 1935.415013] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1935.415354] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-132fed55-40a2-48a6-afed-a995c42f4bbd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.424025] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1935.424025] env[63024]: value = "task-1951353" [ 1935.424025] env[63024]: _type = "Task" [ 1935.424025] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.440109] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951353, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.443153] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951339, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.518268] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4a41005a-6633-4657-bf89-41217217cc32 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "43cdc362-588f-42cc-a4b2-a08fe60293a5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.352s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1935.560192] env[63024]: DEBUG oslo_vmware.api [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951352, 'name': ReconfigVM_Task, 'duration_secs': 0.389969} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.560548] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Reconfigured VM instance instance-00000048 to detach disk 2002 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1935.560775] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1935.561110] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e2dfca1-1c1d-437d-ae41-ba314e7e6b5a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.568411] env[63024]: DEBUG oslo_vmware.api [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1935.568411] env[63024]: value = "task-1951354" [ 1935.568411] env[63024]: _type = "Task" [ 1935.568411] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.577741] env[63024]: DEBUG oslo_vmware.api [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951354, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.654499] env[63024]: INFO nova.compute.manager [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Took 40.11 seconds to build instance. [ 1935.754664] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951351, 'name': ReconfigVM_Task, 'duration_secs': 0.696936} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.754664] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 0f371c69-c7ae-4649-b038-be82e8ca74e1/0f371c69-c7ae-4649-b038-be82e8ca74e1.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1935.754664] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-06477e8a-4bfa-418a-bb2c-46be812d39f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.763024] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1935.763024] env[63024]: value = "task-1951355" [ 1935.763024] env[63024]: _type = "Task" [ 1935.763024] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.771862] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951355, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.846024] env[63024]: WARNING nova.network.neutron [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] ffb24eaf-c6b6-414f-a69a-0c8806712ddd already exists in list: networks containing: ['ffb24eaf-c6b6-414f-a69a-0c8806712ddd']. ignoring it [ 1935.846137] env[63024]: WARNING nova.network.neutron [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] ffb24eaf-c6b6-414f-a69a-0c8806712ddd already exists in list: networks containing: ['ffb24eaf-c6b6-414f-a69a-0c8806712ddd']. ignoring it [ 1935.887599] env[63024]: DEBUG nova.compute.manager [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1935.887950] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1935.888834] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da905b2-d108-4eef-b780-8b451cf66255 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.897711] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1935.897941] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73f0147f-d088-4d16-bc48-8bbb528f0cbf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.907674] env[63024]: DEBUG oslo_vmware.api [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1935.907674] env[63024]: value = "task-1951356" [ 1935.907674] env[63024]: _type = "Task" [ 1935.907674] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.922438] env[63024]: DEBUG oslo_vmware.api [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951356, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.938556] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951353, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.946189] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951339, 'name': CreateVM_Task, 'duration_secs': 3.643435} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1935.949861] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1935.950899] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1935.950899] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1935.951116] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1935.951838] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae48a8e6-69ce-4b50-8286-5d0df99dccd6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1935.957788] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 1935.957788] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529b2027-05a1-5e60-2a4f-f33cf4f05bd4" [ 1935.957788] env[63024]: _type = "Task" [ 1935.957788] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1935.968674] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529b2027-05a1-5e60-2a4f-f33cf4f05bd4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1935.985431] env[63024]: INFO nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating resource usage from migration 6aa2db89-5a8c-414f-93a4-16db64f9c2e6 [ 1936.011442] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.011593] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e8ad74ce-7862-4574-98e7-14bc54bd5d6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.011720] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance df2933d1-32c3-48a6-8ceb-d5e3047d0b78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.011870] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance c1fd4146-6dd3-49e9-a744-466e6168e158 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.011995] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 82b7019c-5049-4b8b-abb4-46f326ce3d5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.012127] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b588ea21-dea0-4ee6-8f9e-12007d0a1ce1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.012241] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 31a693b6-293a-4f01-9baf-a9e7e8d453d4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.012354] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 601a003d-811c-4698-b0b6-054482d32c21 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.012465] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 839776ef-0562-424d-b301-2aa896f32e14 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.012604] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9267e5e4-732d-47f1-8a30-d926a1269fb9 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1936.012823] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.012970] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 92d1f96e-bbe7-4654-9d3a-47ba40057157 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.013099] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance fe6847e2-a742-4338-983f-698c13aaefde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.013211] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 73db94b8-cfa8-4457-bccb-d4b780edbd93 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.013318] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 77c27741-ee3a-4a8b-bbd3-89759288f7c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.013424] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 01b8072a-4483-4932-8294-7e5b48e6b203 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.013531] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9a7f4452-ae50-4779-8474-11d3a6d3533f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.013636] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 0f371c69-c7ae-4649-b038-be82e8ca74e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.013740] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance c28e7c21-7e7d-4cda-81e8-63538bd8a1f7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.013870] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance a0a9ea07-dda8-45b4-bab9-cdaf683c0a21 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1936.013977] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 6e0aa58b-85e0-4e74-812f-cc01041ed6d3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.014112] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 669c45b0-34d6-45f8-a30e-b9b96cfd71ef actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.014220] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 96afa44e-d8c6-419c-ae69-04b7b306c2c5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.014326] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance ea24d375-ba88-42ca-a07e-52000ec613c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.014432] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Migration 6aa2db89-5a8c-414f-93a4-16db64f9c2e6 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1936.014536] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9e8e7b6e-1bb2-4e66-b734-2f56e31302af actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1936.082218] env[63024]: DEBUG oslo_vmware.api [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951354, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.156516] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fe794a67-99c0-4fe8-af41-b99291187376 tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "96afa44e-d8c6-419c-ae69-04b7b306c2c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.624s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1936.272941] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951355, 'name': Rename_Task, 'duration_secs': 0.174757} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.273267] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1936.273531] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-022e5c18-456b-4dda-bed5-a28ea21ad90c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.282405] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1936.282405] env[63024]: value = "task-1951357" [ 1936.282405] env[63024]: _type = "Task" [ 1936.282405] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.296431] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951357, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.420284] env[63024]: DEBUG oslo_vmware.api [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951356, 'name': PowerOffVM_Task, 'duration_secs': 0.211632} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.420567] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1936.420761] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1936.420999] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38e0253a-e9c0-46ac-bd49-39ad1cc88560 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.437163] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951353, 'name': CreateSnapshot_Task, 'duration_secs': 0.559105} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.437523] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1936.438357] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c48a6444-e327-4f69-835e-e577f5a3c1ec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.456809] env[63024]: DEBUG nova.network.neutron [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Updating instance_info_cache with network_info: [{"id": "989997b7-12bd-4924-97e2-a65914c47536", "address": "fa:16:3e:3b:9f:01", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989997b7-12", "ovs_interfaceid": 
"989997b7-12bd-4924-97e2-a65914c47536", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "42f23b07-4f23-454c-bdba-e075cd549205", "address": "fa:16:3e:e2:e8:c5", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42f23b07-4f", "ovs_interfaceid": "42f23b07-4f23-454c-bdba-e075cd549205", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "16467e49-8cf6-465f-98d5-471892d9f322", "address": "fa:16:3e:ce:93:6f", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16467e49-8c", "ovs_interfaceid": "16467e49-8cf6-465f-98d5-471892d9f322", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1936.471211] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529b2027-05a1-5e60-2a4f-f33cf4f05bd4, 'name': SearchDatastore_Task, 'duration_secs': 0.017361} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.471211] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.471211] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1936.471211] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.471211] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.471211] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1936.471693] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cfbab3b0-8f6b-4cf8-b351-4bd06c4121c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.483681] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1936.483878] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1936.484730] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41aaf570-4cd9-4f03-b3ca-c378f1dccde6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.494600] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 1936.494600] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52725b65-0fb8-af19-22b1-59d28a09c5e3" [ 1936.494600] env[63024]: _type = "Task" [ 1936.494600] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.506717] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52725b65-0fb8-af19-22b1-59d28a09c5e3, 'name': SearchDatastore_Task, 'duration_secs': 0.012142} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.508225] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e94b3a5f-5db3-4ed0-ae99-025300c13016 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.515889] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 1936.515889] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52095417-6f2f-fd40-c9ba-d0c2c435ca14" [ 1936.515889] env[63024]: _type = "Task" [ 1936.515889] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.522229] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9e32eb32-6eff-4875-b4a3-adfab4647023 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1936.522229] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 23 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1936.522229] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4928MB phys_disk=200GB used_disk=22GB total_vcpus=48 used_vcpus=23 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1936.524987] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1936.525982] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1936.525982] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleting the datastore file [datastore1] b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1936.525982] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c8535ceb-fae9-4d17-8536-d9b7858a8205 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.532029] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52095417-6f2f-fd40-c9ba-d0c2c435ca14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.536678] env[63024]: DEBUG oslo_vmware.api [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1936.536678] env[63024]: value = "task-1951360" [ 1936.536678] env[63024]: _type = "Task" [ 1936.536678] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1936.546423] env[63024]: DEBUG oslo_vmware.api [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951360, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1936.581202] env[63024]: DEBUG oslo_vmware.api [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951354, 'name': PowerOnVM_Task, 'duration_secs': 0.569915} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.581504] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1936.581771] env[63024]: DEBUG nova.compute.manager [None req-777e2363-2236-48e0-946c-05ee669393fb tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1936.582605] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501616af-b3b3-4189-9ac8-e9020df2b926 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.795123] env[63024]: DEBUG oslo_vmware.api [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951357, 'name': PowerOnVM_Task, 'duration_secs': 0.511085} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1936.795417] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1936.795627] env[63024]: DEBUG nova.compute.manager [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1936.796434] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d054a4fd-a003-49e4-90a3-a2a3437fabb9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.933019] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee15af41-245e-4ea6-8408-271cd92aa5d4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.942327] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64e493e-5b55-4962-8bff-debe23c7c37a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.981468] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1936.982298] env[63024]: DEBUG oslo_concurrency.lockutils [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1936.982737] env[63024]: DEBUG oslo_concurrency.lockutils [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1936.986581] env[63024]: DEBUG oslo_concurrency.lockutils [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1936.986581] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d24f2b9e-062a-4c67-b897-cd2b39b5acad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.986989] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4e88297e-9912-4339-b3f5-115ee434f612 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1936.989978] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14d3c33-ff81-4247-96d7-bb55cc73e5fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.013341] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bda35172-d7b7-45b4-9359-7037ab3fa61c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.017029] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1937.017029] env[63024]: value = "task-1951361" [ 1937.017029] env[63024]: _type = "Task" [ 1937.017029] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.017029] env[63024]: DEBUG nova.virt.hardware [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1937.017029] env[63024]: DEBUG nova.virt.hardware [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1937.017029] env[63024]: DEBUG nova.virt.hardware [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1937.017029] env[63024]: DEBUG nova.virt.hardware [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1937.017029] env[63024]: DEBUG nova.virt.hardware [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1937.017029] env[63024]: DEBUG nova.virt.hardware [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1937.017029] env[63024]: DEBUG nova.virt.hardware [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1937.017029] env[63024]: DEBUG nova.virt.hardware [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1937.017673] env[63024]: DEBUG nova.virt.hardware [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1937.017673] env[63024]: DEBUG nova.virt.hardware [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1937.017673] env[63024]: DEBUG nova.virt.hardware [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1937.023990] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Reconfiguring VM to attach interface {{(pid=63024) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1937.025053] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff004af7-5ec2-43ab-9d94-08573b8c3f92 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.057463] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1937.070066] env[63024]: DEBUG oslo_vmware.api [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951360, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.314996} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.070347] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52095417-6f2f-fd40-c9ba-d0c2c435ca14, 'name': SearchDatastore_Task, 'duration_secs': 0.013863} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1937.074834] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1937.075056] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1937.075456] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1937.075456] env[63024]: INFO nova.compute.manager [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1937.075694] env[63024]: DEBUG oslo.service.loopingcall [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1937.075988] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1937.076534] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] ea24d375-ba88-42ca-a07e-52000ec613c0/ea24d375-ba88-42ca-a07e-52000ec613c0.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1937.076798] env[63024]: DEBUG oslo_vmware.api [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1937.076798] env[63024]: value = "task-1951362" [ 1937.076798] env[63024]: _type = "Task" [ 1937.076798] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.077347] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951361, 'name': CloneVM_Task} progress is 23%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.077578] env[63024]: DEBUG nova.compute.manager [-] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1937.077710] env[63024]: DEBUG nova.network.neutron [-] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1937.079432] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f04b9e4-98f7-48fc-a1c2-d4fb648ca47e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1937.092711] env[63024]: DEBUG oslo_vmware.api [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951362, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.096750] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 1937.096750] env[63024]: value = "task-1951363" [ 1937.096750] env[63024]: _type = "Task" [ 1937.096750] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1937.109135] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951363, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.140035] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "12e63b42-5554-44d5-86eb-d592bc0b2ad6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.140035] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "12e63b42-5554-44d5-86eb-d592bc0b2ad6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.178462] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "c71abfaa-dc65-4d1b-8a34-dff9dd682fe7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.178776] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "c71abfaa-dc65-4d1b-8a34-dff9dd682fe7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.216724] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "6d21976b-f519-4c87-a0d2-0a406060608d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.216982] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "6d21976b-f519-4c87-a0d2-0a406060608d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1937.323544] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 
tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1937.540804] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951361, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.561017] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1937.595586] env[63024]: DEBUG oslo_vmware.api [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951362, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.609596] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951363, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1937.642226] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1937.682359] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1937.719236] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1937.857771] env[63024]: DEBUG nova.network.neutron [-] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1937.969020] env[63024]: DEBUG nova.compute.manager [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1937.970225] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95020f32-9d8e-404f-918e-eabe761ad7c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.035398] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951361, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.066839] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1938.067077] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.099s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1938.067441] env[63024]: DEBUG oslo_concurrency.lockutils [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.644s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1938.067628] env[63024]: DEBUG oslo_concurrency.lockutils [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1938.069744] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 10.822s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1938.094792] env[63024]: DEBUG oslo_vmware.api [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951362, 'name': 
ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.095820] env[63024]: INFO nova.scheduler.client.report [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Deleted allocations for instance 9267e5e4-732d-47f1-8a30-d926a1269fb9 [ 1938.111268] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951363, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.638066} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.111548] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] ea24d375-ba88-42ca-a07e-52000ec613c0/ea24d375-ba88-42ca-a07e-52000ec613c0.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1938.111791] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1938.112146] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed2461f8-557e-4adb-b4ca-598f0214a328 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.122055] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 1938.122055] env[63024]: value = "task-1951364" [ 1938.122055] env[63024]: _type = "Task" [ 1938.122055] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.132072] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951364, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.163924] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1938.203217] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1938.239101] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1938.341376] env[63024]: DEBUG nova.compute.manager [req-f76630d2-a3ed-4bf7-a708-d7023ef3a41e req-0fe602ad-2719-418d-be7a-1035e897d92e service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received event network-changed-16467e49-8cf6-465f-98d5-471892d9f322 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1938.341535] env[63024]: DEBUG nova.compute.manager [req-f76630d2-a3ed-4bf7-a708-d7023ef3a41e req-0fe602ad-2719-418d-be7a-1035e897d92e service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Refreshing instance network info cache due to event network-changed-16467e49-8cf6-465f-98d5-471892d9f322. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1938.341827] env[63024]: DEBUG oslo_concurrency.lockutils [req-f76630d2-a3ed-4bf7-a708-d7023ef3a41e req-0fe602ad-2719-418d-be7a-1035e897d92e service nova] Acquiring lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1938.342017] env[63024]: DEBUG oslo_concurrency.lockutils [req-f76630d2-a3ed-4bf7-a708-d7023ef3a41e req-0fe602ad-2719-418d-be7a-1035e897d92e service nova] Acquired lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1938.342227] env[63024]: DEBUG nova.network.neutron [req-f76630d2-a3ed-4bf7-a708-d7023ef3a41e req-0fe602ad-2719-418d-be7a-1035e897d92e service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Refreshing network info cache for port 16467e49-8cf6-465f-98d5-471892d9f322 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1938.360438] env[63024]: INFO nova.compute.manager [-] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Took 1.28 seconds to deallocate network for instance. 
[ 1938.482934] env[63024]: INFO nova.compute.manager [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] instance snapshotting [ 1938.483692] env[63024]: DEBUG nova.objects.instance [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'flavor' on Instance uuid fe6847e2-a742-4338-983f-698c13aaefde {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1938.532823] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Volume attach. Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1938.533204] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402188', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'name': 'volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c28e7c21-7e7d-4cda-81e8-63538bd8a1f7', 'attached_at': '', 'detached_at': '', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'serial': '2cfc46b0-10e9-4f4c-8f58-7fff36954695'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1938.534239] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b4c639-728c-4e08-a053-cc1a497b15f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.541631] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951361, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.557031] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87d803b-afed-47b1-be7d-0f312de749a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.580058] env[63024]: INFO nova.compute.claims [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1938.592135] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695/volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1938.592989] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e793864b-823a-41b2-8d3e-155d49f01fca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.613439] env[63024]: DEBUG oslo_concurrency.lockutils [None req-632d938b-1db3-45b7-98dc-39fbcb4d2f44 tempest-ServersWithSpecificFlavorTestJSON-153126588 tempest-ServersWithSpecificFlavorTestJSON-153126588-project-member] Lock "9267e5e4-732d-47f1-8a30-d926a1269fb9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.345s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1938.621038] env[63024]: DEBUG oslo_vmware.api [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951362, 'name': ReconfigVM_Task, 'duration_secs': 1.069507} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.622906] env[63024]: DEBUG oslo_concurrency.lockutils [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1938.623224] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Reconfigured VM to attach interface {{(pid=63024) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1938.626530] env[63024]: DEBUG oslo_vmware.api [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1938.626530] env[63024]: value = "task-1951365" [ 1938.626530] env[63024]: _type = "Task" [ 1938.626530] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.641651] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951364, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.168428} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1938.645846] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1938.646692] env[63024]: DEBUG oslo_vmware.api [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951365, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.647671] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-196353da-6b1a-4ec8-adcc-3a803c2fa032 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.675833] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] ea24d375-ba88-42ca-a07e-52000ec613c0/ea24d375-ba88-42ca-a07e-52000ec613c0.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1938.676214] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0c852bd2-be2e-4c51-b85f-d70236069ac3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1938.698280] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 1938.698280] env[63024]: value = "task-1951366" [ 1938.698280] env[63024]: _type = "Task" [ 1938.698280] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1938.707219] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951366, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1938.867847] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1938.993070] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e54908bc-9660-4c90-9b5a-e8931020f815 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.016596] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7776a7c1-f169-477e-9ddd-99f35427d0b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.036203] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951361, 'name': CloneVM_Task, 'duration_secs': 1.889211} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.036482] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Created linked-clone VM from snapshot [ 1939.037244] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28017bba-53e2-44bd-9e49-d3f8e5690461 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.045740] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Uploading image 40f8ab95-7d6f-4198-8361-af5878051d90 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1939.063170] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1939.063857] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-464c5c67-d75f-4385-8c33-d209e930557b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.074225] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1939.074225] env[63024]: value = "task-1951367" [ 1939.074225] env[63024]: _type = "Task" [ 1939.074225] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.085461] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951367, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.096023] env[63024]: INFO nova.compute.resource_tracker [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating resource usage from migration 6aa2db89-5a8c-414f-93a4-16db64f9c2e6 [ 1939.129302] env[63024]: DEBUG oslo_concurrency.lockutils [None req-020be388-2599-472a-96af-9116384d5939 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-9a7f4452-ae50-4779-8474-11d3a6d3533f-16467e49-8cf6-465f-98d5-471892d9f322" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.292s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.144016] env[63024]: DEBUG oslo_vmware.api [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951365, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.211202] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951366, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.232953] env[63024]: DEBUG nova.network.neutron [req-f76630d2-a3ed-4bf7-a708-d7023ef3a41e req-0fe602ad-2719-418d-be7a-1035e897d92e service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Updated VIF entry in instance network info cache for port 16467e49-8cf6-465f-98d5-471892d9f322. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1939.233521] env[63024]: DEBUG nova.network.neutron [req-f76630d2-a3ed-4bf7-a708-d7023ef3a41e req-0fe602ad-2719-418d-be7a-1035e897d92e service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Updating instance_info_cache with network_info: [{"id": "989997b7-12bd-4924-97e2-a65914c47536", "address": "fa:16:3e:3b:9f:01", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989997b7-12", "ovs_interfaceid": "989997b7-12bd-4924-97e2-a65914c47536", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "42f23b07-4f23-454c-bdba-e075cd549205", "address": "fa:16:3e:e2:e8:c5", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap42f23b07-4f", "ovs_interfaceid": "42f23b07-4f23-454c-bdba-e075cd549205", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "16467e49-8cf6-465f-98d5-471892d9f322", "address": "fa:16:3e:ce:93:6f", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap16467e49-8c", "ovs_interfaceid": "16467e49-8cf6-465f-98d5-471892d9f322", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1939.319512] env[63024]: DEBUG oslo_concurrency.lockutils [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "669c45b0-34d6-45f8-a30e-b9b96cfd71ef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.319844] env[63024]: DEBUG oslo_concurrency.lockutils [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "669c45b0-34d6-45f8-a30e-b9b96cfd71ef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.320116] env[63024]: DEBUG oslo_concurrency.lockutils [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "669c45b0-34d6-45f8-a30e-b9b96cfd71ef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1939.320331] env[63024]: DEBUG oslo_concurrency.lockutils [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "669c45b0-34d6-45f8-a30e-b9b96cfd71ef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1939.320505] env[63024]: DEBUG oslo_concurrency.lockutils [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "669c45b0-34d6-45f8-a30e-b9b96cfd71ef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1939.325703] env[63024]: INFO nova.compute.manager [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Terminating instance [ 1939.534233] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1939.534233] env[63024]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-78b54a69-b0ab-4e7a-9c24-b94ed185e765 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.543017] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd78eea2-7e90-47c1-aaf8-cb647dde27a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.547449] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1939.547449] env[63024]: value = "task-1951368" [ 1939.547449] env[63024]: _type = "Task" [ 1939.547449] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.562901] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d738e94-93f8-450d-b33e-259d60b72327 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.571946] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951368, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.607200] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c6c7b4-8a0b-4581-b4c1-8705eff8f40e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.613746] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951367, 'name': Destroy_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.620323] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd53f0e0-c885-4bac-9fbb-395a66306f01 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.637895] env[63024]: DEBUG nova.compute.provider_tree [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1939.649361] env[63024]: DEBUG oslo_vmware.api [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951365, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.710931] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951366, 'name': ReconfigVM_Task, 'duration_secs': 0.563112} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1939.711340] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Reconfigured VM instance instance-00000055 to attach disk [datastore1] ea24d375-ba88-42ca-a07e-52000ec613c0/ea24d375-ba88-42ca-a07e-52000ec613c0.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1939.712097] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-769ab31c-ada7-4d40-9a88-b1f73333de73 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.724046] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 1939.724046] env[63024]: value = "task-1951369" [ 1939.724046] env[63024]: _type = "Task" [ 1939.724046] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.735012] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951369, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1939.736834] env[63024]: DEBUG oslo_concurrency.lockutils [req-f76630d2-a3ed-4bf7-a708-d7023ef3a41e req-0fe602ad-2719-418d-be7a-1035e897d92e service nova] Releasing lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1939.830202] env[63024]: DEBUG nova.compute.manager [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1939.830464] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1939.831401] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95cd57d1-c64a-4f36-9062-4b13bdc0917f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.845527] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1939.845840] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fb51bd5-c4a1-4e26-8106-ca454eb2fa1b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1939.859056] env[63024]: DEBUG oslo_vmware.api [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1939.859056] env[63024]: value = "task-1951370" [ 1939.859056] env[63024]: _type = "Task" [ 1939.859056] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1939.871862] env[63024]: DEBUG oslo_vmware.api [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951370, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.059593] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951368, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.068278] env[63024]: DEBUG oslo_concurrency.lockutils [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "96afa44e-d8c6-419c-ae69-04b7b306c2c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.068554] env[63024]: DEBUG oslo_concurrency.lockutils [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "96afa44e-d8c6-419c-ae69-04b7b306c2c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.068763] env[63024]: DEBUG oslo_concurrency.lockutils [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "96afa44e-d8c6-419c-ae69-04b7b306c2c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1940.068949] env[63024]: DEBUG oslo_concurrency.lockutils [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "96afa44e-d8c6-419c-ae69-04b7b306c2c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.069142] env[63024]: DEBUG oslo_concurrency.lockutils [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "96afa44e-d8c6-419c-ae69-04b7b306c2c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.071577] env[63024]: INFO nova.compute.manager [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Terminating instance [ 1940.087794] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951367, 'name': Destroy_Task, 'duration_secs': 0.646757} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.088181] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Destroyed the VM [ 1940.088429] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1940.088713] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-25ce23ec-3e9e-410b-8a20-4082846c49e1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.097213] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1940.097213] env[63024]: value = "task-1951371" [ 1940.097213] env[63024]: _type = "Task" [ 1940.097213] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.114692] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951371, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.144604] env[63024]: DEBUG nova.scheduler.client.report [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1940.156424] env[63024]: DEBUG oslo_vmware.api [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951365, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.234547] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951369, 'name': Rename_Task, 'duration_secs': 0.34652} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.234833] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1940.235124] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b224e89f-8df2-495b-905a-59c20429e564 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.243203] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 1940.243203] env[63024]: value = "task-1951372" [ 1940.243203] env[63024]: _type = "Task" [ 1940.243203] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.253131] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951372, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.370847] env[63024]: DEBUG oslo_vmware.api [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951370, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.433609] env[63024]: DEBUG nova.compute.manager [req-bcd0b4a6-2d48-427c-9d0c-8352b525772e req-bc32ab87-3629-4f9d-a270-083cc525a502 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Received event network-changed-7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1940.433835] env[63024]: DEBUG nova.compute.manager [req-bcd0b4a6-2d48-427c-9d0c-8352b525772e req-bc32ab87-3629-4f9d-a270-083cc525a502 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Refreshing instance network info cache due to event network-changed-7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1940.434099] env[63024]: DEBUG oslo_concurrency.lockutils [req-bcd0b4a6-2d48-427c-9d0c-8352b525772e req-bc32ab87-3629-4f9d-a270-083cc525a502 service nova] Acquiring lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1940.434233] env[63024]: DEBUG oslo_concurrency.lockutils [req-bcd0b4a6-2d48-427c-9d0c-8352b525772e req-bc32ab87-3629-4f9d-a270-083cc525a502 service nova] Acquired lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1940.434403] env[63024]: DEBUG nova.network.neutron [req-bcd0b4a6-2d48-427c-9d0c-8352b525772e req-bc32ab87-3629-4f9d-a270-083cc525a502 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Refreshing network info cache for port 7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1940.558446] env[63024]: DEBUG nova.compute.manager [req-a25db58a-69df-4bc9-bc03-6b9837a61414 req-6d19d48b-31f7-416f-b61b-5a78d85a3113 service nova] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Received event network-vif-deleted-d0439e29-9598-4648-991c-d2aff3b3fcf9 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1940.568310] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951368, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.575491] env[63024]: DEBUG nova.compute.manager [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1940.575645] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1940.576822] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b934568f-b354-4830-b45e-c06a056c7971 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.586804] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1940.587212] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b84f6b52-8ae0-4f8f-aecb-ad439551d2fe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.597665] env[63024]: DEBUG oslo_vmware.api [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1940.597665] env[63024]: value = "task-1951373" [ 1940.597665] env[63024]: _type = "Task" [ 1940.597665] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.616328] env[63024]: DEBUG oslo_vmware.api [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951373, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.621154] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951371, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.650682] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.581s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1940.650891] env[63024]: INFO nova.compute.manager [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Migrating [ 1940.657430] env[63024]: DEBUG oslo_vmware.api [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951365, 'name': ReconfigVM_Task, 'duration_secs': 1.920511} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.660618] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.551s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1940.662185] env[63024]: INFO nova.compute.claims [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1940.665125] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Reconfigured VM instance instance-0000004d to attach disk [datastore1] volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695/volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1940.675145] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f86f147-1814-434c-a1fd-beab7115e95f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.696441] env[63024]: DEBUG oslo_vmware.api [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1940.696441] env[63024]: value = "task-1951374" [ 1940.696441] env[63024]: _type = "Task" [ 1940.696441] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.709492] env[63024]: DEBUG oslo_vmware.api [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951374, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.758941] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951372, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1940.870365] env[63024]: DEBUG oslo_vmware.api [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951370, 'name': PowerOffVM_Task, 'duration_secs': 0.794094} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1940.870536] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1940.870696] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1940.870958] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2d220358-47b3-47fa-b82f-a06938df0a53 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.060728] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951368, 'name': CreateSnapshot_Task, 'duration_secs': 1.124619} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.061092] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1941.061714] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306bfc00-bff3-4dfa-b601-e3fae8b83a1e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.110693] env[63024]: DEBUG oslo_vmware.api [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951373, 'name': PowerOffVM_Task, 'duration_secs': 0.254612} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.113835] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1941.114070] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1941.115155] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951371, 'name': RemoveSnapshot_Task} progress is 76%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.115155] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f78f89fa-94af-4466-a335-c657c4b76af7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.188897] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1941.189162] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1941.189434] env[63024]: DEBUG nova.network.neutron [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1941.208371] env[63024]: DEBUG oslo_vmware.api [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951374, 'name': ReconfigVM_Task, 'duration_secs': 0.178471} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.208371] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402188', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'name': 'volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c28e7c21-7e7d-4cda-81e8-63538bd8a1f7', 'attached_at': '', 'detached_at': '', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'serial': '2cfc46b0-10e9-4f4c-8f58-7fff36954695'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1941.240415] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1941.240713] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1941.240915] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Deleting the datastore file [datastore1] 669c45b0-34d6-45f8-a30e-b9b96cfd71ef {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1941.241747] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd22525a-b8c0-4984-bce5-16ef41438177 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.254561] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951372, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.255647] env[63024]: DEBUG oslo_vmware.api [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1941.255647] env[63024]: value = "task-1951377" [ 1941.255647] env[63024]: _type = "Task" [ 1941.255647] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.264839] env[63024]: DEBUG oslo_vmware.api [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951377, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.325891] env[63024]: DEBUG oslo_concurrency.lockutils [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "interface-9a7f4452-ae50-4779-8474-11d3a6d3533f-42f23b07-4f23-454c-bdba-e075cd549205" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1941.326194] env[63024]: DEBUG oslo_concurrency.lockutils [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-9a7f4452-ae50-4779-8474-11d3a6d3533f-42f23b07-4f23-454c-bdba-e075cd549205" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1941.327661] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1941.331019] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1941.331019] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Deleting the datastore file [datastore1] 96afa44e-d8c6-419c-ae69-04b7b306c2c5 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1941.331019] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e73e3d91-653f-4b95-abdd-b1ff0fa1389e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.336971] env[63024]: DEBUG oslo_vmware.api [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for the task: (returnval){ [ 1941.336971] env[63024]: value = "task-1951378" [ 1941.336971] env[63024]: _type = "Task" [ 1941.336971] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.354100] env[63024]: DEBUG oslo_vmware.api [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951378, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.389810] env[63024]: DEBUG nova.network.neutron [req-bcd0b4a6-2d48-427c-9d0c-8352b525772e req-bc32ab87-3629-4f9d-a270-083cc525a502 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updated VIF entry in instance network info cache for port 7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1941.390308] env[63024]: DEBUG nova.network.neutron [req-bcd0b4a6-2d48-427c-9d0c-8352b525772e req-bc32ab87-3629-4f9d-a270-083cc525a502 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updating instance_info_cache with network_info: [{"id": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "address": "fa:16:3e:9e:4f:a9", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d713c35-a0", "ovs_interfaceid": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1941.580382] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1941.580811] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-e2c54c86-6455-4db5-a078-442be7f7d9e6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.592399] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1941.592399] env[63024]: value = "task-1951379" [ 1941.592399] env[63024]: _type = "Task" [ 1941.592399] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.601232] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951379, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.609229] env[63024]: DEBUG oslo_vmware.api [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951371, 'name': RemoveSnapshot_Task, 'duration_secs': 1.106076} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.609529] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1941.769058] env[63024]: DEBUG oslo_vmware.api [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951372, 'name': PowerOnVM_Task, 'duration_secs': 1.168203} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.777129] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1941.777371] env[63024]: INFO nova.compute.manager [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Took 12.08 seconds to spawn the instance on the hypervisor. [ 1941.781286] env[63024]: DEBUG nova.compute.manager [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1941.781545] env[63024]: DEBUG oslo_vmware.api [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951377, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177055} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.783848] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d8962d5-72f0-4a5d-b321-d963698ce360 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.787852] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1941.787852] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1941.787852] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1941.787852] env[63024]: INFO nova.compute.manager [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Took 1.96 seconds to destroy the instance on the hypervisor. [ 1941.787852] env[63024]: DEBUG oslo.service.loopingcall [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1941.787852] env[63024]: DEBUG nova.compute.manager [-] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1941.787852] env[63024]: DEBUG nova.network.neutron [-] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1941.831479] env[63024]: DEBUG oslo_concurrency.lockutils [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1941.831675] env[63024]: DEBUG oslo_concurrency.lockutils [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1941.832721] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25c8a033-ce1b-4e77-88ca-9f6736929af5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.869220] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc01a73f-1df3-41b1-9896-1ba704e21384 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.880134] env[63024]: DEBUG oslo_vmware.api [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Task: {'id': task-1951378, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181653} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.900275] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1941.900663] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1941.900725] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1941.900853] env[63024]: INFO nova.compute.manager [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Took 1.33 seconds to destroy the instance on the hypervisor. [ 1941.901309] env[63024]: DEBUG oslo.service.loopingcall [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1941.901937] env[63024]: DEBUG oslo_concurrency.lockutils [req-bcd0b4a6-2d48-427c-9d0c-8352b525772e req-bc32ab87-3629-4f9d-a270-083cc525a502 service nova] Releasing lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1941.907677] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Reconfiguring VM to detach interface {{(pid=63024) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1941.912470] env[63024]: DEBUG nova.compute.manager [-] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1941.912570] env[63024]: DEBUG nova.network.neutron [-] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1941.914062] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-24b1af1a-5a70-44c8-81fb-dfe208ca3834 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.942118] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1941.942118] env[63024]: value = "task-1951380" [ 1941.942118] env[63024]: _type = "Task" [ 1941.942118] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.951603] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.108107] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951379, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.114683] env[63024]: WARNING nova.compute.manager [None req-97741c4d-af95-459b-839d-2cc3e8b7a41a tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Image not found during snapshot: nova.exception.ImageNotFound: Image 40f8ab95-7d6f-4198-8361-af5878051d90 could not be found. 
[ 1942.259246] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-144aeadc-9fa7-4e9a-aed9-19c75f20742a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.268351] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18cf2beb-8a42-4cd5-b091-ab34df54f773 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.300572] env[63024]: DEBUG nova.objects.instance [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lazy-loading 'flavor' on Instance uuid c28e7c21-7e7d-4cda-81e8-63538bd8a1f7 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1942.302963] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93094654-370a-4845-bfab-46ccf7f27ea7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.314897] env[63024]: INFO nova.compute.manager [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Took 35.58 seconds to build instance. [ 1942.318973] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac1a74fa-ffb5-442b-88cb-a00a3bd7f02f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.338712] env[63024]: DEBUG nova.compute.provider_tree [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1942.440963] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1942.441365] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.441487] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 
tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1942.441675] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1942.442052] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.450807] env[63024]: INFO nova.compute.manager [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Terminating instance [ 1942.466701] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.577413] env[63024]: DEBUG nova.network.neutron [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance_info_cache with network_info: [{"id": "6aa34054-6865-4348-9871-fd32c747ab34", "address": "fa:16:3e:82:1f:7b", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aa34054-68", "ovs_interfaceid": "6aa34054-6865-4348-9871-fd32c747ab34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.605518] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951379, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.747690] env[63024]: DEBUG nova.network.neutron [-] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.805351] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ba82cd5d-9aa6-4970-b1b2-1b173c8b0213 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 10.924s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.824024] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7801fe62-d802-41e2-9441-56c0cc59eef3 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.098s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1942.863852] env[63024]: ERROR nova.scheduler.client.report [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [req-a0db8b33-fcd2-491d-9a13-ba57b5a0f354] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a0db8b33-fcd2-491d-9a13-ba57b5a0f354"}]} [ 1942.884526] env[63024]: DEBUG nova.scheduler.client.report [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 1942.899610] env[63024]: DEBUG nova.scheduler.client.report [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 1942.899848] env[63024]: DEBUG nova.compute.provider_tree [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 168, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1942.913165] env[63024]: DEBUG nova.scheduler.client.report [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 1942.938532] env[63024]: DEBUG nova.scheduler.client.report [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 1942.953482] env[63024]: DEBUG nova.compute.manager [req-782b0588-80e4-4c07-abae-4120d9ef0753 req-7433ab6c-b8b8-4a5b-834a-ea8ab6098d6e service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Received event network-changed-7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1942.953482] env[63024]: DEBUG nova.compute.manager [req-782b0588-80e4-4c07-abae-4120d9ef0753 req-7433ab6c-b8b8-4a5b-834a-ea8ab6098d6e service nova] 
[instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Refreshing instance network info cache due to event network-changed-7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1942.953482] env[63024]: DEBUG oslo_concurrency.lockutils [req-782b0588-80e4-4c07-abae-4120d9ef0753 req-7433ab6c-b8b8-4a5b-834a-ea8ab6098d6e service nova] Acquiring lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1942.953592] env[63024]: DEBUG oslo_concurrency.lockutils [req-782b0588-80e4-4c07-abae-4120d9ef0753 req-7433ab6c-b8b8-4a5b-834a-ea8ab6098d6e service nova] Acquired lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1942.954598] env[63024]: DEBUG nova.network.neutron [req-782b0588-80e4-4c07-abae-4120d9ef0753 req-7433ab6c-b8b8-4a5b-834a-ea8ab6098d6e service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Refreshing network info cache for port 7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1942.960469] env[63024]: DEBUG nova.compute.manager [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1942.960550] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1942.961034] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1942.962202] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbff95c-9643-46e9-beed-2572b1ede389 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.975788] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1942.975788] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d09907ba-b68a-4259-8527-f9969d44da07 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.981809] env[63024]: DEBUG oslo_vmware.api [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1942.981809] env[63024]: value = "task-1951381" [ 1942.981809] env[63024]: _type = "Task" [ 1942.981809] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1942.994474] env[63024]: DEBUG oslo_vmware.api [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951381, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.081893] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1943.115055] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951379, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.251191] env[63024]: INFO nova.compute.manager [-] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Took 1.46 seconds to deallocate network for instance. 
[ 1943.261558] env[63024]: DEBUG nova.network.neutron [-] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1943.294061] env[63024]: DEBUG nova.compute.manager [req-ee1f4118-62dd-4415-8dbb-57d79d75df6d req-b03e1645-05e3-476a-854d-38f5074a0da3 service nova] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Received event network-vif-deleted-14537c97-623f-4d93-80a9-8cd7457a0a75 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1943.296243] env[63024]: INFO nova.compute.manager [req-ee1f4118-62dd-4415-8dbb-57d79d75df6d req-b03e1645-05e3-476a-854d-38f5074a0da3 service nova] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Neutron deleted interface 14537c97-623f-4d93-80a9-8cd7457a0a75; detaching it from the instance and deleting it from the info cache [ 1943.296243] env[63024]: DEBUG nova.network.neutron [req-ee1f4118-62dd-4415-8dbb-57d79d75df6d req-b03e1645-05e3-476a-854d-38f5074a0da3 service nova] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1943.463046] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.489858] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0649b10c-2d7d-4bd3-9f72-b23e3cd41b13 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.499862] env[63024]: DEBUG oslo_vmware.api [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951381, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.504974] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c90dfc01-6e17-4518-a99c-bf73e9b233dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.545190] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-274de703-dcf9-48c1-a98d-872f15e4d4fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.554481] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8ef52f-024e-42ab-979b-9793ae547683 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.573261] env[63024]: DEBUG nova.compute.provider_tree [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1943.614054] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951379, 'name': CloneVM_Task} progress is 95%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.766197] env[63024]: DEBUG oslo_concurrency.lockutils [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.766800] env[63024]: INFO nova.compute.manager [-] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Took 1.85 seconds to deallocate network for instance. 
[ 1943.790466] env[63024]: INFO nova.compute.manager [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Rebuilding instance [ 1943.798297] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6716bea1-a8c9-4b6b-bf15-50e1336ac197 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.812488] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7510d658-07c0-4f69-85ea-31354ea73c42 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.862163] env[63024]: DEBUG nova.compute.manager [req-ee1f4118-62dd-4415-8dbb-57d79d75df6d req-b03e1645-05e3-476a-854d-38f5074a0da3 service nova] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Detach interface failed, port_id=14537c97-623f-4d93-80a9-8cd7457a0a75, reason: Instance 96afa44e-d8c6-419c-ae69-04b7b306c2c5 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1943.870289] env[63024]: DEBUG nova.compute.manager [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1943.871219] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc72638-0537-44aa-a8b3-afc94323a12e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.949868] env[63024]: DEBUG oslo_concurrency.lockutils [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "6e0aa58b-85e0-4e74-812f-cc01041ed6d3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.950183] env[63024]: DEBUG oslo_concurrency.lockutils [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "6e0aa58b-85e0-4e74-812f-cc01041ed6d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1943.950511] env[63024]: DEBUG oslo_concurrency.lockutils [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "6e0aa58b-85e0-4e74-812f-cc01041ed6d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1943.950724] env[63024]: DEBUG oslo_concurrency.lockutils [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "6e0aa58b-85e0-4e74-812f-cc01041ed6d3-events" 
acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1943.950887] env[63024]: DEBUG oslo_concurrency.lockutils [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "6e0aa58b-85e0-4e74-812f-cc01041ed6d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1943.953475] env[63024]: INFO nova.compute.manager [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Terminating instance [ 1943.960475] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1943.994671] env[63024]: DEBUG oslo_vmware.api [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951381, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.110551] env[63024]: DEBUG nova.scheduler.client.report [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 128 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 1944.110796] env[63024]: DEBUG nova.compute.provider_tree [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 128 to 129 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1944.110909] env[63024]: DEBUG nova.compute.provider_tree [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} 
{{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1944.119016] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951379, 'name': CloneVM_Task, 'duration_secs': 2.412502} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.119583] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Created linked-clone VM from snapshot [ 1944.120386] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d507436-9f75-4e66-abad-0e4c479ecd09 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.131045] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Uploading image 28dd7777-46b4-4ea5-820b-4e139758df9b {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1944.162241] env[63024]: DEBUG oslo_vmware.rw_handles [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1944.162241] env[63024]: value = "vm-402192" [ 1944.162241] env[63024]: _type = "VirtualMachine" [ 1944.162241] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1944.162241] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b3149213-0c45-4077-99ef-e26dfbe3ea2f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.170977] env[63024]: DEBUG oslo_vmware.rw_handles [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lease: (returnval){ [ 1944.170977] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521d1151-1e91-eeb5-0694-ee56dc54bac5" [ 1944.170977] env[63024]: _type = "HttpNfcLease" [ 1944.170977] env[63024]: } obtained for exporting VM: (result){ [ 1944.170977] env[63024]: value = "vm-402192" [ 1944.170977] env[63024]: _type = "VirtualMachine" [ 1944.170977] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1944.171679] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the lease: (returnval){ [ 1944.171679] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521d1151-1e91-eeb5-0694-ee56dc54bac5" [ 1944.171679] env[63024]: _type = "HttpNfcLease" [ 1944.171679] env[63024]: } to be ready. 
{{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1944.180874] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1944.180874] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521d1151-1e91-eeb5-0694-ee56dc54bac5" [ 1944.180874] env[63024]: _type = "HttpNfcLease" [ 1944.180874] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1944.203741] env[63024]: DEBUG nova.network.neutron [req-782b0588-80e4-4c07-abae-4120d9ef0753 req-7433ab6c-b8b8-4a5b-834a-ea8ab6098d6e service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updated VIF entry in instance network info cache for port 7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1944.204196] env[63024]: DEBUG nova.network.neutron [req-782b0588-80e4-4c07-abae-4120d9ef0753 req-7433ab6c-b8b8-4a5b-834a-ea8ab6098d6e service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updating instance_info_cache with network_info: [{"id": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "address": "fa:16:3e:9e:4f:a9", "network": {"id": "20e77237-56c4-40bb-8203-aa035239c938", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1073367986-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.156", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "12d782556c614caf84a51b37fa43b5de", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a64108f9-df0a-4feb-bbb5-97f5841c356c", "external-id": "nsx-vlan-transportzone-67", "segmentation_id": 67, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7d713c35-a0", "ovs_interfaceid": "7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1944.276581] env[63024]: DEBUG oslo_concurrency.lockutils [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1944.468963] env[63024]: DEBUG nova.compute.manager [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1944.468963] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1944.468963] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.470430] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8fc94ef-ddaf-464a-adde-81b48ef07a9e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.481212] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1944.481744] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f7bacf88-7280-4f57-a49b-2250c4b0b99e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.499650] env[63024]: DEBUG oslo_vmware.api [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951381, 'name': PowerOffVM_Task, 'duration_secs': 1.246261} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.501616] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1944.501873] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1944.502225] env[63024]: DEBUG oslo_vmware.api [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1944.502225] env[63024]: value = "task-1951383" [ 1944.502225] env[63024]: _type = "Task" [ 1944.502225] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.502493] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-abb853ba-6c9e-4a92-959c-5ea3de9b315e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.519376] env[63024]: DEBUG oslo_vmware.api [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951383, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.599841] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e26daf0-778c-4998-b024-6d279b3b890f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.623962] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.963s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.624536] env[63024]: DEBUG nova.compute.manager [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1944.628103] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance '9e8e7b6e-1bb2-4e66-b734-2f56e31302af' progress to 0 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1944.637026] env[63024]: DEBUG oslo_concurrency.lockutils [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.139s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1944.637026] env[63024]: DEBUG oslo_concurrency.lockutils [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1944.637026] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 7.312s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1944.637026] env[63024]: DEBUG nova.objects.instance [None 
req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63024) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1944.644504] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1944.644745] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1944.644988] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Deleting the datastore file [datastore1] 77c27741-ee3a-4a8b-bbd3-89759288f7c6 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1944.645388] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c952b2f4-8a8a-48b2-a06f-fb7f6363c5cf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.656984] env[63024]: DEBUG oslo_vmware.api [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for the task: (returnval){ [ 1944.656984] env[63024]: value = "task-1951385" [ 1944.656984] env[63024]: _type = "Task" [ 1944.656984] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.669847] env[63024]: DEBUG oslo_vmware.api [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951385, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.671317] env[63024]: INFO nova.scheduler.client.report [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Deleted allocations for instance a0a9ea07-dda8-45b4-bab9-cdaf683c0a21 [ 1944.684239] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1944.684239] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521d1151-1e91-eeb5-0694-ee56dc54bac5" [ 1944.684239] env[63024]: _type = "HttpNfcLease" [ 1944.684239] env[63024]: } is ready. 
{{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1944.685193] env[63024]: DEBUG oslo_vmware.rw_handles [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1944.685193] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521d1151-1e91-eeb5-0694-ee56dc54bac5" [ 1944.685193] env[63024]: _type = "HttpNfcLease" [ 1944.685193] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1944.685605] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f14c34d4-3a86-4032-9fee-2080a0aeb81d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.698576] env[63024]: DEBUG oslo_vmware.rw_handles [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e80921-29b9-e77e-3ec2-75a3eed773ad/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1944.698833] env[63024]: DEBUG oslo_vmware.rw_handles [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e80921-29b9-e77e-3ec2-75a3eed773ad/disk-0.vmdk for reading. {{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1944.761616] env[63024]: DEBUG oslo_concurrency.lockutils [req-782b0588-80e4-4c07-abae-4120d9ef0753 req-7433ab6c-b8b8-4a5b-834a-ea8ab6098d6e service nova] Releasing lock "refresh_cache-01b8072a-4483-4932-8294-7e5b48e6b203" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1944.761911] env[63024]: DEBUG nova.compute.manager [req-782b0588-80e4-4c07-abae-4120d9ef0753 req-7433ab6c-b8b8-4a5b-834a-ea8ab6098d6e service nova] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Received event network-vif-deleted-6bc8e59c-a429-442e-a0c8-d59867ad2ad9 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1944.853861] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3e4626e7-50b2-4076-945d-1ebb618e3d6b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.890430] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1944.890721] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f74a944e-0fac-42b1-bece-f56deab5c498 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.903828] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f 
tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1944.903828] env[63024]: value = "task-1951386" [ 1944.903828] env[63024]: _type = "Task" [ 1944.903828] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.913364] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951386, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.961248] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.019022] env[63024]: DEBUG oslo_vmware.api [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951383, 'name': PowerOffVM_Task, 'duration_secs': 0.2659} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.019022] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1945.019022] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1945.019300] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a3bca018-4c1b-4464-bc5c-a13b212a230e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.137471] env[63024]: DEBUG nova.compute.utils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1945.141793] env[63024]: DEBUG nova.compute.manager [req-ad7c9697-b6da-4869-ba83-d42d6838b514 req-fc71cabd-ecb9-4feb-b3ae-7490537b9f1f service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Received event network-changed-95e5d41a-5998-4d48-9aec-6255c74c448f {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1945.142923] env[63024]: DEBUG nova.compute.manager [req-ad7c9697-b6da-4869-ba83-d42d6838b514 req-fc71cabd-ecb9-4feb-b3ae-7490537b9f1f service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Refreshing instance network info cache due to event network-changed-95e5d41a-5998-4d48-9aec-6255c74c448f. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1945.142923] env[63024]: DEBUG oslo_concurrency.lockutils [req-ad7c9697-b6da-4869-ba83-d42d6838b514 req-fc71cabd-ecb9-4feb-b3ae-7490537b9f1f service nova] Acquiring lock "refresh_cache-ea24d375-ba88-42ca-a07e-52000ec613c0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1945.142923] env[63024]: DEBUG oslo_concurrency.lockutils [req-ad7c9697-b6da-4869-ba83-d42d6838b514 req-fc71cabd-ecb9-4feb-b3ae-7490537b9f1f service nova] Acquired lock "refresh_cache-ea24d375-ba88-42ca-a07e-52000ec613c0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1945.143090] env[63024]: DEBUG nova.network.neutron [req-ad7c9697-b6da-4869-ba83-d42d6838b514 req-fc71cabd-ecb9-4feb-b3ae-7490537b9f1f service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Refreshing network info cache for port 95e5d41a-5998-4d48-9aec-6255c74c448f {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1945.145715] env[63024]: DEBUG nova.compute.manager [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1945.146110] env[63024]: DEBUG nova.network.neutron [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1945.152310] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1945.157948] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7018f76c-31e3-414b-b4c7-76da8478eae5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.171896] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1945.176020] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1945.176020] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Deleting the datastore file [datastore1] 6e0aa58b-85e0-4e74-812f-cc01041ed6d3 {{(pid=63024) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1945.176020] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68d95a09-12f7-4207-8a33-016ed5f9c289 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1945.188402] env[63024]: DEBUG oslo_vmware.api [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Task: {'id': task-1951385, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18016} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.189145] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1945.189145] env[63024]: value = "task-1951388" [ 1945.189145] env[63024]: _type = "Task" [ 1945.189145] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.189825] env[63024]: DEBUG oslo_concurrency.lockutils [None req-754cf9a4-b4ef-4ef4-aa73-f36ae6252b97 tempest-ImagesTestJSON-1301675684 tempest-ImagesTestJSON-1301675684-project-member] Lock "a0a9ea07-dda8-45b4-bab9-cdaf683c0a21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.905s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.192436] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1945.192696] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1945.193014] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1945.195724] env[63024]: INFO nova.compute.manager [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Took 2.24 seconds to destroy the instance on the hypervisor. [ 1945.196354] env[63024]: DEBUG oslo.service.loopingcall [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1945.197781] env[63024]: DEBUG nova.compute.manager [-] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1945.197990] env[63024]: DEBUG nova.network.neutron [-] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1945.207815] env[63024]: DEBUG oslo_vmware.api [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1945.207815] env[63024]: value = "task-1951389" [ 1945.207815] env[63024]: _type = "Task" [ 1945.207815] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1945.219026] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951388, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.227296] env[63024]: DEBUG oslo_vmware.api [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951389, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.301140] env[63024]: DEBUG nova.policy [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '893bfe0d8eef423aae6c7eb5cdc1a9e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18540818b60e4483963d14559bc5c38d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1945.414828] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951386, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.462733] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.646689] env[63024]: DEBUG nova.compute.manager [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1945.662774] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6843b54-4781-4020-97cf-532bc5d3ce59 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.027s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1945.665470] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.501s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1945.665900] env[63024]: INFO nova.compute.claims [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1945.708524] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951388, 'name': PowerOffVM_Task, 'duration_secs': 0.312822} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.708887] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1945.709108] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance '9e8e7b6e-1bb2-4e66-b734-2f56e31302af' progress to 17 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1945.724382] env[63024]: DEBUG oslo_vmware.api [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951389, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157744} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1945.724783] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1945.725037] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1945.725230] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1945.725464] env[63024]: INFO nova.compute.manager [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Took 1.26 seconds to destroy the instance on the hypervisor. [ 1945.725848] env[63024]: DEBUG oslo.service.loopingcall [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1945.726128] env[63024]: DEBUG nova.compute.manager [-] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1945.726236] env[63024]: DEBUG nova.network.neutron [-] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1945.916641] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951386, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1945.948100] env[63024]: DEBUG nova.network.neutron [req-ad7c9697-b6da-4869-ba83-d42d6838b514 req-fc71cabd-ecb9-4feb-b3ae-7490537b9f1f service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Updated VIF entry in instance network info cache for port 95e5d41a-5998-4d48-9aec-6255c74c448f. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1945.948463] env[63024]: DEBUG nova.network.neutron [req-ad7c9697-b6da-4869-ba83-d42d6838b514 req-fc71cabd-ecb9-4feb-b3ae-7490537b9f1f service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Updating instance_info_cache with network_info: [{"id": "95e5d41a-5998-4d48-9aec-6255c74c448f", "address": "fa:16:3e:c2:99:b7", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap95e5d41a-59", "ovs_interfaceid": "95e5d41a-5998-4d48-9aec-6255c74c448f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1945.964905] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.221191] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1946.221503] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1946.221698] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1946.221934] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1946.222612] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1946.223120] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1946.223120] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1946.224826] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1946.225169] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] 
Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1946.225552] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1946.225811] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1946.234539] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1fd8fac-5504-4c30-821c-a80e5914ba13 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.257707] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1946.257707] env[63024]: value = "task-1951390" [ 1946.257707] env[63024]: _type = "Task" [ 1946.257707] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.268444] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951390, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.417548] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951386, 'name': PowerOffVM_Task, 'duration_secs': 1.257616} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.418299] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1946.440119] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Acquiring lock "601a003d-811c-4698-b0b6-054482d32c21" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.440598] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Lock "601a003d-811c-4698-b0b6-054482d32c21" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.440919] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Acquiring lock "601a003d-811c-4698-b0b6-054482d32c21-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1946.441155] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Lock "601a003d-811c-4698-b0b6-054482d32c21-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1946.441335] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Lock "601a003d-811c-4698-b0b6-054482d32c21-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1946.444340] env[63024]: INFO nova.compute.manager [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Terminating instance [ 1946.453369] env[63024]: DEBUG oslo_concurrency.lockutils [req-ad7c9697-b6da-4869-ba83-d42d6838b514 req-fc71cabd-ecb9-4feb-b3ae-7490537b9f1f service nova] Releasing lock "refresh_cache-ea24d375-ba88-42ca-a07e-52000ec613c0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1946.471299] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 
tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.508950] env[63024]: INFO nova.compute.manager [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Detaching volume 2cfc46b0-10e9-4f4c-8f58-7fff36954695 [ 1946.553893] env[63024]: INFO nova.virt.block_device [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Attempting to driver detach volume 2cfc46b0-10e9-4f4c-8f58-7fff36954695 from mountpoint /dev/sdb [ 1946.554151] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Volume detach. Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1946.554320] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402188', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'name': 'volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c28e7c21-7e7d-4cda-81e8-63538bd8a1f7', 'attached_at': '', 'detached_at': '', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'serial': '2cfc46b0-10e9-4f4c-8f58-7fff36954695'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1946.555366] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8fea99-2d58-4d0f-8023-ca7b734e4bc2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.579160] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9b0fa5f-6129-4ef9-a619-8a97bc9ee2c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.592038] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10061e6d-23e6-41ff-8ad4-b32b8504c4fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.617018] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c6de9e-4a55-490a-8122-6add8b9a8a37 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.630524] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] The volume has not been displaced from its original location: [datastore1] 
volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695/volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695.vmdk. No consolidation needed. {{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1946.636151] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Reconfiguring VM instance instance-0000004d to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1946.638154] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-562d5b86-0c28-4b80-8c8e-7bd7f4813460 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.653894] env[63024]: DEBUG nova.network.neutron [-] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.657310] env[63024]: DEBUG nova.compute.manager [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1946.666237] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1946.666237] env[63024]: value = "task-1951391" [ 1946.666237] env[63024]: _type = "Task" [ 1946.666237] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.682697] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951391, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.697699] env[63024]: DEBUG nova.virt.hardware [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1946.697869] env[63024]: DEBUG nova.virt.hardware [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1946.698385] env[63024]: DEBUG nova.virt.hardware [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1946.698821] env[63024]: DEBUG nova.virt.hardware [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1946.698923] env[63024]: DEBUG nova.virt.hardware [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1946.699059] env[63024]: DEBUG nova.virt.hardware [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1946.699400] env[63024]: DEBUG nova.virt.hardware [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1946.699577] env[63024]: DEBUG nova.virt.hardware [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1946.699783] env[63024]: DEBUG nova.virt.hardware [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 
tempest-ServersTestJSON-2007614233-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1946.700435] env[63024]: DEBUG nova.virt.hardware [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1946.700435] env[63024]: DEBUG nova.virt.hardware [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1946.701339] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e9896e-11c4-43ce-9b83-1c8b2d8ccc13 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.704888] env[63024]: DEBUG nova.network.neutron [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Successfully created port: a75c5293-2308-41d5-9464-4013af532f66 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1946.715418] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6631d8-5e89-42b3-b943-5e95a4034ab5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.774027] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951390, 'name': ReconfigVM_Task, 'duration_secs': 0.271991} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1946.774027] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance '9e8e7b6e-1bb2-4e66-b734-2f56e31302af' progress to 33 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1946.888152] env[63024]: DEBUG nova.compute.manager [req-00c30b14-62af-43a7-8aac-a9e6e46f5904 req-b0b9e83e-ea2d-4cc6-a17a-8b397444ea2c service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Received event network-vif-deleted-7cedcfa3-1f00-4ebd-88b4-207d64c14235 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1946.888152] env[63024]: INFO nova.compute.manager [req-00c30b14-62af-43a7-8aac-a9e6e46f5904 req-b0b9e83e-ea2d-4cc6-a17a-8b397444ea2c service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Neutron deleted interface 7cedcfa3-1f00-4ebd-88b4-207d64c14235; detaching it from the instance and deleting it from the info cache [ 1946.888152] env[63024]: DEBUG nova.network.neutron [req-00c30b14-62af-43a7-8aac-a9e6e46f5904 req-b0b9e83e-ea2d-4cc6-a17a-8b397444ea2c service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1946.954916] env[63024]: DEBUG nova.compute.manager [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1946.955195] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1946.960070] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-76c08b96-611c-4988-b6b1-f6493bc78f9d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1946.976046] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1946.978093] env[63024]: DEBUG oslo_vmware.api [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Waiting for the task: (returnval){ [ 1946.978093] env[63024]: value = "task-1951392" [ 1946.978093] env[63024]: _type = "Task" [ 1946.978093] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1946.994989] env[63024]: DEBUG oslo_vmware.api [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1951392, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.001362] env[63024]: DEBUG nova.network.neutron [-] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1947.156399] env[63024]: INFO nova.compute.manager [-] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Took 1.43 seconds to deallocate network for instance. [ 1947.179986] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951391, 'name': ReconfigVM_Task, 'duration_secs': 0.329427} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.179986] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Reconfigured VM instance instance-0000004d to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1947.186453] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28f3c822-ffa6-45b8-9bbf-f5a06e642e43 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.214620] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1947.214620] env[63024]: value = "task-1951393" [ 1947.214620] env[63024]: _type = "Task" [ 1947.214620] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.229053] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951393, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.233590] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c54017f-c316-4a08-b1c3-d7f8d1c16cbe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.242811] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52bb3497-e06c-4130-a378-31ad3aaaeb2e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.282720] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1947.284432] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1947.284432] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1947.284432] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1947.284432] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1947.284432] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1947.284432] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1947.284432] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1947.284432] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1947.284854] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1947.284854] env[63024]: DEBUG nova.virt.hardware [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1947.290559] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Reconfiguring VM instance instance-00000051 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1947.292071] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e648f1e9-5e6b-4cc2-ac6e-f341aa801d6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.294704] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4028f381-6d1e-49f3-9983-de2093e6a5af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.317106] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11837dca-6805-4010-98fd-c7477185f282 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.322154] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1947.322154] env[63024]: value = "task-1951394" [ 1947.322154] env[63024]: _type = "Task" [ 1947.322154] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.336465] env[63024]: DEBUG nova.compute.provider_tree [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1947.344869] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951394, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.395071] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-79c6f321-e146-4d7a-b6a0-97ad7612e90c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.409744] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aa18fa5-a68d-4b79-882e-30ac18d69422 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.451623] env[63024]: DEBUG nova.compute.manager [req-00c30b14-62af-43a7-8aac-a9e6e46f5904 req-b0b9e83e-ea2d-4cc6-a17a-8b397444ea2c service nova] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Detach interface failed, port_id=7cedcfa3-1f00-4ebd-88b4-207d64c14235, reason: Instance 77c27741-ee3a-4a8b-bbd3-89759288f7c6 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1947.470397] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.493287] env[63024]: DEBUG oslo_vmware.api [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1951392, 'name': PowerOffVM_Task, 'duration_secs': 0.18265} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.493901] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1947.494208] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1947.494817] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402080', 'volume_id': '51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'name': 'volume-51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '601a003d-811c-4698-b0b6-054482d32c21', 'attached_at': '', 'detached_at': '', 'volume_id': '51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'serial': '51f1feea-0afc-4d43-8b47-c2e3f20d424c'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1947.495873] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a45a275-5dfa-4539-9ef1-86308c978657 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.518038] env[63024]: INFO nova.compute.manager [-] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Took 2.32 seconds to deallocate network for instance. [ 1947.521619] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee0ac8c5-c07b-4249-85c8-06a9a44b0ed5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.535062] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d18ef5e-dafe-4581-a02f-1b6f54861ec8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.560076] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197dcac4-a63b-4bd3-a8d5-c302b237e5f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.577976] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] The volume has not been displaced from its original location: [datastore1] volume-51f1feea-0afc-4d43-8b47-c2e3f20d424c/volume-51f1feea-0afc-4d43-8b47-c2e3f20d424c.vmdk. No consolidation needed. 
{{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1947.583418] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Reconfiguring VM instance instance-00000032 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1947.585030] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09b2b3e6-7c3f-43f1-a94f-c02186bfd0b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.604295] env[63024]: DEBUG oslo_vmware.api [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Waiting for the task: (returnval){ [ 1947.604295] env[63024]: value = "task-1951395" [ 1947.604295] env[63024]: _type = "Task" [ 1947.604295] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.613342] env[63024]: DEBUG oslo_vmware.api [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1951395, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.665778] env[63024]: DEBUG oslo_concurrency.lockutils [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1947.726285] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951393, 'name': ReconfigVM_Task, 'duration_secs': 0.212759} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.726689] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402188', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'name': 'volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c28e7c21-7e7d-4cda-81e8-63538bd8a1f7', 'attached_at': '', 'detached_at': '', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'serial': '2cfc46b0-10e9-4f4c-8f58-7fff36954695'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1947.836609] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951394, 'name': ReconfigVM_Task, 'duration_secs': 0.215954} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1947.836968] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Reconfigured VM instance instance-00000051 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1947.837884] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19d4626-ca2b-40e6-a27b-9d49b6dbbd87 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.841558] env[63024]: DEBUG nova.scheduler.client.report [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1947.872308] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Reconfiguring VM instance instance-00000051 to attach disk [datastore1] 9e8e7b6e-1bb2-4e66-b734-2f56e31302af/9e8e7b6e-1bb2-4e66-b734-2f56e31302af.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1947.873368] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33eaee83-9907-42e3-8001-d3d87b8440cd {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1947.895713] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1947.895713] env[63024]: value = "task-1951396" [ 1947.895713] env[63024]: _type = "Task" [ 1947.895713] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1947.909906] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951396, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1947.968715] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.033688] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1948.115063] env[63024]: DEBUG oslo_vmware.api [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1951395, 'name': ReconfigVM_Task, 'duration_secs': 0.172522} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.115470] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Reconfigured VM instance instance-00000032 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1948.120508] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dcce5ba1-c027-4be5-abf0-c092b6d91870 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.143128] env[63024]: DEBUG oslo_vmware.api [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Waiting for the task: (returnval){ [ 1948.143128] env[63024]: value = "task-1951397" [ 1948.143128] env[63024]: _type = "Task" [ 1948.143128] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.156790] env[63024]: DEBUG oslo_vmware.api [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1951397, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.350142] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1948.351391] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1948.356043] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.153s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1948.358027] env[63024]: INFO nova.compute.claims [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1948.416919] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951396, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.466938] env[63024]: DEBUG oslo_vmware.api [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951380, 'name': ReconfigVM_Task, 'duration_secs': 6.044948} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.467544] env[63024]: DEBUG oslo_concurrency.lockutils [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1948.467642] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Reconfigured VM to detach interface {{(pid=63024) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1948.654997] env[63024]: DEBUG oslo_vmware.api [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1951397, 'name': ReconfigVM_Task, 'duration_secs': 0.214657} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.655443] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402080', 'volume_id': '51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'name': 'volume-51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '601a003d-811c-4698-b0b6-054482d32c21', 'attached_at': '', 'detached_at': '', 'volume_id': '51f1feea-0afc-4d43-8b47-c2e3f20d424c', 'serial': '51f1feea-0afc-4d43-8b47-c2e3f20d424c'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1948.657653] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1948.657653] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13008da-bd76-4967-8d4c-5ab18cfe05ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.665595] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1948.666780] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d1b21aeb-0d0c-477b-a647-159fd86b9d7a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.793713] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1948.793940] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9bab12f9-7c74-4ef3-b312-327f7b2dcf9d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.804572] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1948.804572] env[63024]: value = "task-1951399" [ 1948.804572] env[63024]: _type = "Task" [ 1948.804572] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.816849] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1948.817322] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Volume detach. Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1948.817620] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402188', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'name': 'volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c28e7c21-7e7d-4cda-81e8-63538bd8a1f7', 'attached_at': '', 'detached_at': '', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'serial': '2cfc46b0-10e9-4f4c-8f58-7fff36954695'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1948.818480] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b09b15a-6524-42f5-b41f-bc7073ddef45 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.842593] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78589d9-7c0a-49f7-92d8-4f72ddcc5ca0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.845987] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1948.846419] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1948.846791] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Deleting the datastore file [datastore1] 601a003d-811c-4698-b0b6-054482d32c21 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1948.847222] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-59a799f8-ca82-453a-8599-d6c853a17807 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.853142] env[63024]: WARNING nova.virt.vmwareapi.driver [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1948.853498] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1948.856078] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8570a39-83ae-4135-b76d-08c9f76a92e1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.858945] env[63024]: DEBUG oslo_vmware.api [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Waiting for the task: (returnval){ [ 1948.858945] env[63024]: value = "task-1951400" [ 1948.858945] env[63024]: _type = "Task" [ 1948.858945] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1948.865980] env[63024]: DEBUG nova.compute.utils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1948.869185] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1948.869754] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1948.869976] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1948.875811] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca3b270b-8770-40a8-8dfb-45849ead75ee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1948.878838] env[63024]: DEBUG oslo_vmware.api [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1951400, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1948.908567] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951396, 'name': ReconfigVM_Task, 'duration_secs': 0.521097} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1948.909114] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Reconfigured VM instance instance-00000051 to attach disk [datastore1] 9e8e7b6e-1bb2-4e66-b734-2f56e31302af/9e8e7b6e-1bb2-4e66-b734-2f56e31302af.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1948.909371] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance '9e8e7b6e-1bb2-4e66-b734-2f56e31302af' progress to 50 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1948.986809] env[63024]: DEBUG nova.policy [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99a6a5e80e3543c392eef7553e305bb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '179b837229c642f1b495b236fd98e8eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1949.066089] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 
1949.066437] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1949.066703] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleting the datastore file [datastore1] c28e7c21-7e7d-4cda-81e8-63538bd8a1f7 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1949.067069] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e6a6a737-0127-4b74-b274-a353459c3ecf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.079053] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1949.079053] env[63024]: value = "task-1951402" [ 1949.079053] env[63024]: _type = "Task" [ 1949.079053] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1949.085172] env[63024]: DEBUG nova.compute.manager [req-0c7f7b9b-c7b2-4b6e-9f29-2c5bd89c633a req-b4aa4ec1-b3d4-4b4b-9b31-2a325f1dee46 service nova] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Received event network-vif-deleted-e916ba27-f6c3-4efb-9a22-64b761547830 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1949.085283] env[63024]: DEBUG nova.compute.manager [req-0c7f7b9b-c7b2-4b6e-9f29-2c5bd89c633a req-b4aa4ec1-b3d4-4b4b-9b31-2a325f1dee46 service nova] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Received event network-vif-plugged-a75c5293-2308-41d5-9464-4013af532f66 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1949.085536] env[63024]: DEBUG oslo_concurrency.lockutils [req-0c7f7b9b-c7b2-4b6e-9f29-2c5bd89c633a req-b4aa4ec1-b3d4-4b4b-9b31-2a325f1dee46 service nova] Acquiring lock "9e32eb32-6eff-4875-b4a3-adfab4647023-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1949.085915] env[63024]: DEBUG oslo_concurrency.lockutils [req-0c7f7b9b-c7b2-4b6e-9f29-2c5bd89c633a req-b4aa4ec1-b3d4-4b4b-9b31-2a325f1dee46 service nova] Lock "9e32eb32-6eff-4875-b4a3-adfab4647023-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1949.086025] env[63024]: DEBUG oslo_concurrency.lockutils [req-0c7f7b9b-c7b2-4b6e-9f29-2c5bd89c633a req-b4aa4ec1-b3d4-4b4b-9b31-2a325f1dee46 service nova] Lock "9e32eb32-6eff-4875-b4a3-adfab4647023-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1949.086681] env[63024]: DEBUG nova.compute.manager [req-0c7f7b9b-c7b2-4b6e-9f29-2c5bd89c633a 
req-b4aa4ec1-b3d4-4b4b-9b31-2a325f1dee46 service nova] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] No waiting events found dispatching network-vif-plugged-a75c5293-2308-41d5-9464-4013af532f66 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1949.087110] env[63024]: WARNING nova.compute.manager [req-0c7f7b9b-c7b2-4b6e-9f29-2c5bd89c633a req-b4aa4ec1-b3d4-4b4b-9b31-2a325f1dee46 service nova] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Received unexpected event network-vif-plugged-a75c5293-2308-41d5-9464-4013af532f66 for instance with vm_state building and task_state spawning. [ 1949.100947] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951402, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1949.147417] env[63024]: DEBUG nova.network.neutron [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Successfully updated port: a75c5293-2308-41d5-9464-4013af532f66 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1949.372852] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1949.376325] env[63024]: DEBUG oslo_vmware.api [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Task: {'id': task-1951400, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099134} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.380501] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1949.380717] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1949.380980] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1949.381357] env[63024]: INFO nova.compute.manager [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Took 2.43 seconds to destroy the instance on the hypervisor. [ 1949.381586] env[63024]: DEBUG oslo.service.loopingcall [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1949.383773] env[63024]: DEBUG nova.compute.manager [-] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1949.384015] env[63024]: DEBUG nova.network.neutron [-] [instance: 601a003d-811c-4698-b0b6-054482d32c21] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1949.417764] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-142f191b-ed33-4ddb-b8ae-1a5017dd5770 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.445662] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5a5c01-de29-4dda-b9bc-db695005ef55 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.470376] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance '9e8e7b6e-1bb2-4e66-b734-2f56e31302af' progress to 67 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1949.598141] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951402, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19799} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1949.598493] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1949.598837] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1949.599184] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1949.649964] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "refresh_cache-9e32eb32-6eff-4875-b4a3-adfab4647023" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1949.650136] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "refresh_cache-9e32eb32-6eff-4875-b4a3-adfab4647023" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1949.650395] env[63024]: DEBUG nova.network.neutron [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1949.883061] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a061ce48-c254-4b05-bd91-46829d0ee329 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.891633] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c340eb20-28da-4863-a86a-8e470d370805 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.931412] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d57657f6-6468-49ff-af5f-13878f1d7986 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.940308] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56ea6991-2926-4b7b-9469-6329b4047d0b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1949.958383] env[63024]: DEBUG nova.compute.provider_tree [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] 
Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1950.091369] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Successfully created port: 1bb2d3e3-4d49-4c71-86a2-d2210cb9f711 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1950.108895] env[63024]: INFO nova.virt.block_device [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Booting with volume 2cfc46b0-10e9-4f4c-8f58-7fff36954695 at /dev/sdb [ 1950.140318] env[63024]: DEBUG oslo_concurrency.lockutils [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1950.141258] env[63024]: DEBUG oslo_concurrency.lockutils [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1950.141258] env[63024]: DEBUG nova.network.neutron [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1950.162784] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fb9612da-2b91-4a7f-b8e8-3af440e93d7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.175627] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d45d0a5d-4cc2-4a35-977b-960f5cc39d31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.187693] env[63024]: DEBUG nova.network.neutron [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Port 6aa34054-6865-4348-9871-fd32c747ab34 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1950.194565] env[63024]: DEBUG nova.network.neutron [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1950.226185] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b31eeb84-b0d3-4e4b-a87b-b3d75d7dc050 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.238145] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615955a5-1d8a-48cc-8182-bb79b1a78096 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.288412] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffe058a6-5730-4741-8058-5ed47bba2425 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.297129] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3410c67a-1243-422c-99ef-1da5b9060647 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.314842] env[63024]: DEBUG nova.virt.block_device [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Updating existing volume attachment record: 743ad4b9-0d5e-416a-85f8-c3bd836b015a {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1950.384574] env[63024]: DEBUG nova.network.neutron [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Updating instance_info_cache with network_info: [{"id": "a75c5293-2308-41d5-9464-4013af532f66", "address": "fa:16:3e:1e:93:3f", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa75c5293-23", "ovs_interfaceid": "a75c5293-2308-41d5-9464-4013af532f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.387697] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1950.422774] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1950.423111] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1950.423282] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1950.423467] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1950.423710] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1950.424617] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1950.424617] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1950.424617] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 1950.424847] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1950.424847] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1950.425035] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1950.429346] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b677d71d-6dcc-40f2-bec3-746d233676d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.438348] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cc33926-e09e-4ad2-9589-b5ebcd224e9f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.462224] env[63024]: DEBUG nova.scheduler.client.report [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1950.887484] env[63024]: INFO nova.network.neutron [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Port 42f23b07-4f23-454c-bdba-e075cd549205 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1950.887484] env[63024]: INFO nova.network.neutron [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Port 16467e49-8cf6-465f-98d5-471892d9f322 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 1950.887484] env[63024]: DEBUG nova.network.neutron [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Updating instance_info_cache with network_info: [{"id": "989997b7-12bd-4924-97e2-a65914c47536", "address": "fa:16:3e:3b:9f:01", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.234", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap989997b7-12", "ovs_interfaceid": "989997b7-12bd-4924-97e2-a65914c47536", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1950.890354] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "refresh_cache-9e32eb32-6eff-4875-b4a3-adfab4647023" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1950.890354] env[63024]: DEBUG nova.compute.manager [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Instance network_info: |[{"id": "a75c5293-2308-41d5-9464-4013af532f66", "address": "fa:16:3e:1e:93:3f", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa75c5293-23", "ovs_interfaceid": "a75c5293-2308-41d5-9464-4013af532f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 
1950.890984] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:93:3f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec763be6-4041-4651-8fd7-3820cf0ab86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a75c5293-2308-41d5-9464-4013af532f66', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1950.900085] env[63024]: DEBUG oslo.service.loopingcall [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1950.900895] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1950.901624] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ced8f9e1-445e-4158-9fd7-00340886bc87 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1950.925928] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1950.925928] env[63024]: value = "task-1951403" [ 1950.925928] env[63024]: _type = "Task" [ 1950.925928] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1950.941195] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951403, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1950.969982] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.614s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1950.970704] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1950.974176] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.735s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1950.975856] env[63024]: INFO nova.compute.claims [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1951.177640] env[63024]: DEBUG nova.compute.manager [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received event network-vif-deleted-42f23b07-4f23-454c-bdba-e075cd549205 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1951.177640] env[63024]: DEBUG nova.compute.manager [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Received event network-changed-a75c5293-2308-41d5-9464-4013af532f66 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1951.177640] env[63024]: DEBUG nova.compute.manager [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Refreshing instance network info cache due to event network-changed-a75c5293-2308-41d5-9464-4013af532f66. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1951.177640] env[63024]: DEBUG oslo_concurrency.lockutils [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] Acquiring lock "refresh_cache-9e32eb32-6eff-4875-b4a3-adfab4647023" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1951.177810] env[63024]: DEBUG oslo_concurrency.lockutils [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] Acquired lock "refresh_cache-9e32eb32-6eff-4875-b4a3-adfab4647023" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1951.178996] env[63024]: DEBUG nova.network.neutron [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Refreshing network info cache for port a75c5293-2308-41d5-9464-4013af532f66 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1951.213650] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.213923] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.214140] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.390947] env[63024]: DEBUG oslo_concurrency.lockutils [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-9a7f4452-ae50-4779-8474-11d3a6d3533f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1951.442168] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951403, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1951.485593] env[63024]: DEBUG nova.compute.utils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1951.488126] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1951.488352] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1951.531606] env[63024]: DEBUG nova.network.neutron [-] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1951.579180] env[63024]: DEBUG nova.policy [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99a6a5e80e3543c392eef7553e305bb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '179b837229c642f1b495b236fd98e8eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1951.592595] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.593117] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.593477] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "9a7f4452-ae50-4779-8474-11d3a6d3533f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1951.593715] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1951.593894] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.596503] env[63024]: INFO nova.compute.manager [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Terminating instance [ 1951.897440] env[63024]: DEBUG oslo_concurrency.lockutils [None req-696487c3-f23e-4aca-aa6c-e12e93748e09 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-9a7f4452-ae50-4779-8474-11d3a6d3533f-42f23b07-4f23-454c-bdba-e075cd549205" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.571s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1951.947710] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951403, 'name': CreateVM_Task, 'duration_secs': 0.610546} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.948143] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1951.948627] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1951.948776] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1951.949142] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1951.949392] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-91385f1f-76e3-4a93-87cd-eeb27725b94c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.958415] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1951.958415] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522bd8e1-ee48-4042-3c35-328c8a80cb1e" [ 1951.958415] env[63024]: _type = "Task" [ 1951.958415] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1951.971374] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522bd8e1-ee48-4042-3c35-328c8a80cb1e, 'name': SearchDatastore_Task, 'duration_secs': 0.011443} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1951.971596] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1951.971947] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1951.975639] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1951.975639] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1951.975986] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1951.976179] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39c6d527-ebcc-476a-abc0-e507e4bf48b7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1951.988932] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1951.995761] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1951.995951] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1952.001139] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb1b89a1-80fd-4b68-ba98-9d677a746a16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.009097] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1952.009097] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fca718-6b1a-b8c2-f1f3-521bbd8ae6f8" [ 1952.009097] env[63024]: _type = "Task" [ 1952.009097] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.019518] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fca718-6b1a-b8c2-f1f3-521bbd8ae6f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.036437] env[63024]: INFO nova.compute.manager [-] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Took 2.65 seconds to deallocate network for instance. [ 1952.100563] env[63024]: DEBUG nova.network.neutron [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Updated VIF entry in instance network info cache for port a75c5293-2308-41d5-9464-4013af532f66. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1952.100563] env[63024]: DEBUG nova.network.neutron [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Updating instance_info_cache with network_info: [{"id": "a75c5293-2308-41d5-9464-4013af532f66", "address": "fa:16:3e:1e:93:3f", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa75c5293-23", "ovs_interfaceid": "a75c5293-2308-41d5-9464-4013af532f66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1952.102035] env[63024]: DEBUG nova.compute.manager [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 
tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1952.102136] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1952.104344] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f018e7ef-29df-454a-9b2d-77fd33991a8d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.108144] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Successfully created port: 71970f6a-12a4-4779-832b-7b00ff046697 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1952.119252] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1952.119538] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d0e70f36-d278-49a2-8792-4f18784756c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.129424] env[63024]: DEBUG oslo_vmware.api [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 1952.129424] env[63024]: value = "task-1951404" [ 1952.129424] env[63024]: _type = "Task" [ 1952.129424] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.144727] env[63024]: DEBUG oslo_vmware.api [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951404, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.261023] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1952.261023] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1952.261023] env[63024]: DEBUG nova.network.neutron [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1952.481439] env[63024]: DEBUG nova.virt.hardware [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1952.481701] env[63024]: DEBUG nova.virt.hardware [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1952.481876] env[63024]: DEBUG nova.virt.hardware [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1952.482459] env[63024]: DEBUG nova.virt.hardware [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1952.482560] env[63024]: DEBUG nova.virt.hardware [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1952.482746] 
env[63024]: DEBUG nova.virt.hardware [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1952.482897] env[63024]: DEBUG nova.virt.hardware [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1952.483177] env[63024]: DEBUG nova.virt.hardware [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1952.483437] env[63024]: DEBUG nova.virt.hardware [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1952.483612] env[63024]: DEBUG nova.virt.hardware [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1952.483841] env[63024]: DEBUG nova.virt.hardware [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1952.484774] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952b2772-f461-494e-917f-750bdda21fd1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.500951] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f353f53-03f7-4f04-a421-589a6d69f9f2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.518026] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:26:58', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '749aba46-5057-4a6a-8e7c-f7df42b7d129', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1952.526603] env[63024]: DEBUG oslo.service.loopingcall [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for 
function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1952.534296] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1952.534825] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e3be384-7590-40b6-90d5-18755649a333 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.556214] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5ba685-f59a-4120-bdf6-676bb220bc04 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.566224] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fca718-6b1a-b8c2-f1f3-521bbd8ae6f8, 'name': SearchDatastore_Task, 'duration_secs': 0.014146} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.567601] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f75a3fcd-2676-4a68-8104-d4a8200e8848 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.575276] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9dc651-2e58-4ea2-a384-fbef1c6e88cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.578579] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1952.578579] env[63024]: value = "task-1951405" [ 1952.578579] env[63024]: _type = "Task" [ 1952.578579] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.614516] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1952.614516] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5261857e-488b-52be-2b11-be61404366f6" [ 1952.614516] env[63024]: _type = "Task" [ 1952.614516] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.615068] env[63024]: DEBUG oslo_concurrency.lockutils [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] Releasing lock "refresh_cache-9e32eb32-6eff-4875-b4a3-adfab4647023" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1952.615353] env[63024]: DEBUG nova.compute.manager [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received event network-vif-deleted-16467e49-8cf6-465f-98d5-471892d9f322 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1952.615520] env[63024]: DEBUG nova.compute.manager [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Received event network-vif-deleted-2c1caf53-f9b6-4184-b807-b496dcae4cbb {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1952.615686] env[63024]: INFO nova.compute.manager [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Neutron deleted interface 2c1caf53-f9b6-4184-b807-b496dcae4cbb; detaching it from the instance and deleting it from the info cache [ 1952.615864] env[63024]: DEBUG nova.network.neutron [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1952.618436] env[63024]: INFO nova.compute.manager [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Took 0.58 seconds to detach 1 volumes for instance. [ 1952.624686] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f991ae1-9267-44c6-8d9e-0add57f18600 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.627593] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951405, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.628189] env[63024]: DEBUG nova.compute.manager [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Deleting volume: 51f1feea-0afc-4d43-8b47-c2e3f20d424c {{(pid=63024) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1952.643358] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-582480e2-01ae-4f6d-a5b4-e5f8e0347baf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.654229] env[63024]: DEBUG oslo_vmware.api [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951404, 'name': PowerOffVM_Task, 'duration_secs': 0.217953} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.654487] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5261857e-488b-52be-2b11-be61404366f6, 'name': SearchDatastore_Task, 'duration_secs': 0.015206} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1952.655157] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1952.655339] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1952.655648] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1952.655891] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9e32eb32-6eff-4875-b4a3-adfab4647023/9e32eb32-6eff-4875-b4a3-adfab4647023.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1952.656266] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12250d78-db22-467c-9e68-8c1fef315c98 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.657990] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dba578a8-ee60-47b6-b47d-a9a4c9e3e8cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.668566] env[63024]: DEBUG nova.compute.provider_tree [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1952.676987] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1952.676987] env[63024]: value = "task-1951406" [ 1952.676987] env[63024]: _type = "Task" [ 1952.676987] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.689405] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951406, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1952.937434] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1952.937762] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1952.937930] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Deleting the datastore file [datastore1] 9a7f4452-ae50-4779-8474-11d3a6d3533f {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1952.938240] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2c13a37f-b607-4c0c-b1b0-9552a305af5e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1952.954858] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Successfully updated port: 1bb2d3e3-4d49-4c71-86a2-d2210cb9f711 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1952.959682] env[63024]: DEBUG oslo_vmware.api [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] 
Waiting for the task: (returnval){ [ 1952.959682] env[63024]: value = "task-1951409" [ 1952.959682] env[63024]: _type = "Task" [ 1952.959682] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1952.974938] env[63024]: DEBUG oslo_vmware.api [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951409, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.007704] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1953.042107] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1953.043113] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1953.043353] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1953.043655] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1953.043833] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1953.043991] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 
tempest-ListServersNegativeTestJSON-2082324958-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1953.044222] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1953.044433] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1953.044691] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1953.044871] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1953.045098] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1953.046374] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207d73cf-b8f1-488c-98c7-6a36c60e8830 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.059507] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4fd4ed7-86e4-4b4f-830c-c8a43a989637 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.066154] env[63024]: DEBUG oslo_vmware.rw_handles [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e80921-29b9-e77e-3ec2-75a3eed773ad/disk-0.vmdk. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1953.067234] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60506778-4a8e-444d-a1e2-fc0f66a672da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.087582] env[63024]: DEBUG oslo_vmware.rw_handles [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e80921-29b9-e77e-3ec2-75a3eed773ad/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1953.090894] env[63024]: ERROR oslo_vmware.rw_handles [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e80921-29b9-e77e-3ec2-75a3eed773ad/disk-0.vmdk due to incomplete transfer. [ 1953.094387] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-04ff3fbc-4447-427e-bc9c-7574fa3aaf8a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.108567] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951405, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.112415] env[63024]: DEBUG oslo_vmware.rw_handles [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52e80921-29b9-e77e-3ec2-75a3eed773ad/disk-0.vmdk. 
{{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1953.112641] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Uploaded image 28dd7777-46b4-4ea5-820b-4e139758df9b to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1953.119718] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1953.120103] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4a207033-bcca-49ef-8412-4959967fbf39 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.122695] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f004a11d-d984-49dc-9a30-e09c09cc13e7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.136518] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d38120c-acea-4de1-98a9-6a6fd0354862 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.148421] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1953.148421] env[63024]: value = "task-1951410" [ 1953.148421] env[63024]: _type = "Task" [ 1953.148421] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.159613] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951410, 'name': Destroy_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.165180] env[63024]: DEBUG nova.network.neutron [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance_info_cache with network_info: [{"id": "6aa34054-6865-4348-9871-fd32c747ab34", "address": "fa:16:3e:82:1f:7b", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aa34054-68", "ovs_interfaceid": "6aa34054-6865-4348-9871-fd32c747ab34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1953.182681] env[63024]: DEBUG nova.scheduler.client.report [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1953.186744] env[63024]: DEBUG nova.compute.manager [req-ab090db1-1228-49fb-9210-949c6f4357ad req-c0d07cf1-f710-4fdc-a5bd-cc506e7ca542 service nova] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Detach interface failed, port_id=2c1caf53-f9b6-4184-b807-b496dcae4cbb, reason: Instance 601a003d-811c-4698-b0b6-054482d32c21 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1953.188830] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.201691] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951406, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.352821] env[63024]: DEBUG nova.compute.manager [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Received event network-vif-plugged-1bb2d3e3-4d49-4c71-86a2-d2210cb9f711 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1953.353268] env[63024]: DEBUG oslo_concurrency.lockutils [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] Acquiring lock "12e63b42-5554-44d5-86eb-d592bc0b2ad6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1953.353533] env[63024]: DEBUG oslo_concurrency.lockutils [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] Lock "12e63b42-5554-44d5-86eb-d592bc0b2ad6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.353738] env[63024]: DEBUG oslo_concurrency.lockutils [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] Lock "12e63b42-5554-44d5-86eb-d592bc0b2ad6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.353945] env[63024]: DEBUG nova.compute.manager [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] No waiting events found dispatching network-vif-plugged-1bb2d3e3-4d49-4c71-86a2-d2210cb9f711 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1953.354243] env[63024]: WARNING nova.compute.manager [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Received unexpected event network-vif-plugged-1bb2d3e3-4d49-4c71-86a2-d2210cb9f711 for instance with vm_state building and task_state spawning. 
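The "Acquiring lock … / Lock … acquired … waited / … released … held" triplets and the "Waiting for the task / Task: {…} progress is N% / completed successfully" sequences in these entries come from two client-side helpers: oslo.concurrency's named locks and oslo.vmware's task polling. Below is a minimal sketch of how a caller typically drives both; the lock name, vCenter address, credentials, and constructor arguments are illustrative assumptions, not values taken from this run.

    # Hedged sketch of the two patterns behind most of the DEBUG lines above.
    # Lock name, host, credentials, and constructor arguments are illustrative only.
    from oslo_concurrency import lockutils
    from oslo_vmware import api as vmware_api

    IMAGE_LOCK = "[datastore1] devstack-image-cache_base/<image-uuid>"  # assumed name

    def copy_cached_image(session, **copy_args):
        # lockutils.lock() produces the "Acquiring lock ..." / "acquired ... waited" /
        # "released ... held" messages seen throughout this log.
        with lockutils.lock(IMAGE_LOCK):
            # invoke_api() issues the SOAP call logged as
            # "Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=...".
            task = session.invoke_api(
                session.vim, "CopyVirtualDisk_Task",
                session.vim.service_content.virtualDiskManager, **copy_args)
            # wait_for_task() polls the task, logging "progress is N%" until it
            # completes successfully or raises on failure.
            return session.wait_for_task(task)

    if __name__ == "__main__":
        # Parameter names follow oslo.vmware's VMwareAPISession; values are dummies,
        # and create_session=False keeps the sketch from contacting a vCenter.
        session = vmware_api.VMwareAPISession(
            "vc.example.org", "user", "secret",
            api_retry_count=10, task_poll_interval=0.5, create_session=False)
        # copy_cached_image(session, sourceName=..., destName=...)  # needs a live vCenter
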
[ 1953.354439] env[63024]: DEBUG nova.compute.manager [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Received event network-changed-1bb2d3e3-4d49-4c71-86a2-d2210cb9f711 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1953.354630] env[63024]: DEBUG nova.compute.manager [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Refreshing instance network info cache due to event network-changed-1bb2d3e3-4d49-4c71-86a2-d2210cb9f711. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1953.354869] env[63024]: DEBUG oslo_concurrency.lockutils [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] Acquiring lock "refresh_cache-12e63b42-5554-44d5-86eb-d592bc0b2ad6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.355036] env[63024]: DEBUG oslo_concurrency.lockutils [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] Acquired lock "refresh_cache-12e63b42-5554-44d5-86eb-d592bc0b2ad6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1953.355204] env[63024]: DEBUG nova.network.neutron [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Refreshing network info cache for port 1bb2d3e3-4d49-4c71-86a2-d2210cb9f711 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1953.461408] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "refresh_cache-12e63b42-5554-44d5-86eb-d592bc0b2ad6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.476152] env[63024]: DEBUG oslo_vmware.api [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951409, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.603461] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951405, 'name': CreateVM_Task, 'duration_secs': 0.992814} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.604231] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1953.604555] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1953.604555] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1953.604788] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1953.608324] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-167c35bd-6cfa-4c7d-92af-53cd6587c5ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.610760] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1953.610760] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5261c613-567f-6429-d851-a1f4712496c5" [ 1953.610760] env[63024]: _type = "Task" [ 1953.610760] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.623311] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5261c613-567f-6429-d851-a1f4712496c5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.660289] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951410, 'name': Destroy_Task} progress is 33%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.668911] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1953.691320] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.717s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1953.691909] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1953.695768] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.827s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1953.695768] env[63024]: DEBUG nova.objects.instance [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lazy-loading 'resources' on Instance uuid b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1953.704719] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951406, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.63721} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.706972] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9e32eb32-6eff-4875-b4a3-adfab4647023/9e32eb32-6eff-4875-b4a3-adfab4647023.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1953.706972] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1953.706972] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e18ab4d9-3433-42fa-aab2-2bc76cd193c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1953.715027] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1953.715027] env[63024]: value = "task-1951411" [ 1953.715027] env[63024]: _type = "Task" [ 1953.715027] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1953.727216] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951411, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1953.907245] env[63024]: DEBUG nova.network.neutron [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1953.978608] env[63024]: DEBUG oslo_vmware.api [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951409, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.820565} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1953.981811] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1953.982046] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1953.982234] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1953.982454] env[63024]: INFO nova.compute.manager [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Took 1.88 seconds to destroy the instance on the hypervisor. [ 1953.982770] env[63024]: DEBUG oslo.service.loopingcall [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1953.982998] env[63024]: DEBUG nova.compute.manager [-] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1953.984918] env[63024]: DEBUG nova.network.neutron [-] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1954.051101] env[63024]: DEBUG nova.network.neutron [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1954.122713] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5261c613-567f-6429-d851-a1f4712496c5, 'name': SearchDatastore_Task, 'duration_secs': 0.025557} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.123051] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1954.123305] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1954.123541] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1954.123686] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1954.123866] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1954.124171] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-081c6c6f-d110-42a8-8d55-afed450ef640 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.133611] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1954.133793] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1954.134548] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8d206b0e-8d7f-456c-a741-d84a9220dcfb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.140678] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1954.140678] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523a95d2-5c4c-8cea-3198-b3449d9343a7" [ 1954.140678] env[63024]: _type = "Task" [ 1954.140678] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.148753] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523a95d2-5c4c-8cea-3198-b3449d9343a7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.157671] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951410, 'name': Destroy_Task, 'duration_secs': 0.713025} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.157971] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Destroyed the VM [ 1954.158242] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1954.158485] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-301ab3de-ba12-4fae-b1b3-ee6881a8b01c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.165984] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1954.165984] env[63024]: value = "task-1951412" [ 1954.165984] env[63024]: _type = "Task" [ 1954.165984] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.174614] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951412, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.194757] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30fbd543-03c7-487b-ac76-929f673a1778 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.198503] env[63024]: DEBUG nova.compute.utils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1954.208441] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1954.208613] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1954.238386] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc6233ff-99c0-4e5e-9ec0-94f21df83c8e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.245553] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951411, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071734} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.246420] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1954.247492] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85563924-345d-4727-bb6d-1b1534c45856 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.252785] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance '9e8e7b6e-1bb2-4e66-b734-2f56e31302af' progress to 83 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1954.281311] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Reconfiguring VM instance instance-00000056 to attach disk [datastore1] 9e32eb32-6eff-4875-b4a3-adfab4647023/9e32eb32-6eff-4875-b4a3-adfab4647023.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1954.285124] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1704b789-2aa7-4862-915e-41f6a3fe45d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.302809] env[63024]: DEBUG nova.policy [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '99a6a5e80e3543c392eef7553e305bb8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '179b837229c642f1b495b236fd98e8eb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1954.312505] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1954.312505] env[63024]: value = "task-1951413" [ 1954.312505] env[63024]: _type = "Task" [ 1954.312505] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.325900] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951413, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.557978] env[63024]: DEBUG oslo_concurrency.lockutils [req-2df6b173-6eb4-4357-9a19-1db7aa29fcc1 req-79b278d0-9a65-48d1-98fd-15d46eb3697e service nova] Releasing lock "refresh_cache-12e63b42-5554-44d5-86eb-d592bc0b2ad6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1954.559072] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquired lock "refresh_cache-12e63b42-5554-44d5-86eb-d592bc0b2ad6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1954.559224] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1954.565203] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Acquiring lock "6c277ff8-ec25-4fd7-9dea-0efea9a0de29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1954.565510] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Lock "6c277ff8-ec25-4fd7-9dea-0efea9a0de29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1954.654713] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523a95d2-5c4c-8cea-3198-b3449d9343a7, 'name': SearchDatastore_Task, 'duration_secs': 0.009318} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.655914] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6deb1298-1dac-4831-9e5c-792602fca53f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.666216] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1954.666216] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52321f6a-eed6-529e-dc99-42338e83bb6f" [ 1954.666216] env[63024]: _type = "Task" [ 1954.666216] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.687808] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52321f6a-eed6-529e-dc99-42338e83bb6f, 'name': SearchDatastore_Task, 'duration_secs': 0.00999} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.688085] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951412, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.688338] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1954.688589] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c28e7c21-7e7d-4cda-81e8-63538bd8a1f7/c28e7c21-7e7d-4cda-81e8-63538bd8a1f7.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1954.688843] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-46769e00-1d4c-47cf-b003-8ccd158e6199 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.699824] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1954.699824] env[63024]: value = "task-1951414" [ 1954.699824] env[63024]: _type = "Task" [ 1954.699824] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.709794] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1954.719701] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951414, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.760890] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1954.761582] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6cf453e9-a72c-4e7e-a282-c83964326907 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.772446] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1954.772446] env[63024]: value = "task-1951415" [ 1954.772446] env[63024]: _type = "Task" [ 1954.772446] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.778955] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-126f0425-a4f8-4105-807a-89b22a979c87 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.789969] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951415, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1954.796890] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Successfully created port: 119df22d-0c91-429b-8927-6e0b9a7412f5 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1954.807521] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f5a48b-a302-44cd-bcdd-892855df498e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.850059] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5404967c-b262-40e9-aa16-e0f1f8da5295 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.858067] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951413, 'name': ReconfigVM_Task, 'duration_secs': 0.319727} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1954.858801] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Reconfigured VM instance instance-00000056 to attach disk [datastore1] 9e32eb32-6eff-4875-b4a3-adfab4647023/9e32eb32-6eff-4875-b4a3-adfab4647023.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1954.859532] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bfe85942-9769-4686-bbc6-4585bbb19747 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.867020] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ad1a26-8538-441d-9bcd-aa0b918030eb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1954.875672] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1954.875672] env[63024]: value = "task-1951416" [ 1954.875672] env[63024]: _type = "Task" [ 1954.875672] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1954.889381] env[63024]: DEBUG nova.compute.provider_tree [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1954.893461] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951416, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.068985] env[63024]: DEBUG nova.compute.manager [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1955.144669] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Successfully updated port: 71970f6a-12a4-4779-832b-7b00ff046697 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1955.176402] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1955.187862] env[63024]: DEBUG oslo_vmware.api [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951412, 'name': RemoveSnapshot_Task, 'duration_secs': 0.528271} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.188144] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1955.188366] env[63024]: INFO nova.compute.manager [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Took 16.20 seconds to snapshot the instance on the hypervisor. [ 1955.226478] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951414, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509203} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.227307] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c28e7c21-7e7d-4cda-81e8-63538bd8a1f7/c28e7c21-7e7d-4cda-81e8-63538bd8a1f7.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1955.227307] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1955.227585] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c69d0def-2fef-4667-871c-8a76d3d363b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.235950] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1955.235950] env[63024]: value = "task-1951417" [ 1955.235950] env[63024]: _type = "Task" [ 1955.235950] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.249552] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951417, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.284087] env[63024]: DEBUG oslo_vmware.api [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951415, 'name': PowerOnVM_Task, 'duration_secs': 0.478508} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.284450] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1955.284678] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94a21b28-57d0-4a81-923f-997d3a41140b tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance '9e8e7b6e-1bb2-4e66-b734-2f56e31302af' progress to 100 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1955.387765] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951416, 'name': Rename_Task, 'duration_secs': 0.421075} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.387926] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1955.388238] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f8c904f-2570-457b-af20-38813269293b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.396916] env[63024]: DEBUG nova.scheduler.client.report [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1955.400452] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1955.400452] env[63024]: value = "task-1951418" [ 1955.400452] env[63024]: _type = "Task" [ 1955.400452] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.414770] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951418, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.475054] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Updating instance_info_cache with network_info: [{"id": "1bb2d3e3-4d49-4c71-86a2-d2210cb9f711", "address": "fa:16:3e:6e:04:00", "network": {"id": "44fbdd08-2faf-4ea7-b8c6-4f2045ca0856", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-573137423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "179b837229c642f1b495b236fd98e8eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1bb2d3e3-4d", "ovs_interfaceid": "1bb2d3e3-4d49-4c71-86a2-d2210cb9f711", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1955.606851] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.621703] env[63024]: DEBUG nova.compute.manager [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Received event network-vif-plugged-71970f6a-12a4-4779-832b-7b00ff046697 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1955.621800] env[63024]: DEBUG oslo_concurrency.lockutils [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] Acquiring lock "c71abfaa-dc65-4d1b-8a34-dff9dd682fe7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1955.622043] env[63024]: DEBUG oslo_concurrency.lockutils [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] Lock "c71abfaa-dc65-4d1b-8a34-dff9dd682fe7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.622425] env[63024]: DEBUG oslo_concurrency.lockutils [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] Lock "c71abfaa-dc65-4d1b-8a34-dff9dd682fe7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.622559] env[63024]: DEBUG nova.compute.manager [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] No waiting events found dispatching network-vif-plugged-71970f6a-12a4-4779-832b-7b00ff046697 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1955.622732] env[63024]: WARNING nova.compute.manager [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Received unexpected event network-vif-plugged-71970f6a-12a4-4779-832b-7b00ff046697 for instance with vm_state building and task_state spawning. [ 1955.624026] env[63024]: DEBUG nova.compute.manager [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Received event network-changed-71970f6a-12a4-4779-832b-7b00ff046697 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1955.624026] env[63024]: DEBUG nova.compute.manager [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Refreshing instance network info cache due to event network-changed-71970f6a-12a4-4779-832b-7b00ff046697. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1955.624026] env[63024]: DEBUG oslo_concurrency.lockutils [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] Acquiring lock "refresh_cache-c71abfaa-dc65-4d1b-8a34-dff9dd682fe7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1955.624026] env[63024]: DEBUG oslo_concurrency.lockutils [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] Acquired lock "refresh_cache-c71abfaa-dc65-4d1b-8a34-dff9dd682fe7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1955.624026] env[63024]: DEBUG nova.network.neutron [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Refreshing network info cache for port 71970f6a-12a4-4779-832b-7b00ff046697 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1955.647677] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "refresh_cache-c71abfaa-dc65-4d1b-8a34-dff9dd682fe7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1955.728808] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1955.748789] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951417, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068607} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.751945] env[63024]: DEBUG nova.compute.manager [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Found 3 images (rotation: 2) {{(pid=63024) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4884}} [ 1955.752052] env[63024]: DEBUG nova.compute.manager [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Rotating out 1 backups {{(pid=63024) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4892}} [ 1955.752232] env[63024]: DEBUG nova.compute.manager [None req-b4a87a78-d330-4fd1-946b-a8894bc3f270 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Deleting image 54e4ffe9-c344-412d-954f-469fecd124bc {{(pid=63024) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4897}} [ 1955.754464] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1955.756945] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3932be13-8eda-4ff5-b5b2-a96ee665b4eb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.763390] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1955.763768] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1955.763976] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
1955.764163] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1955.764389] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1955.764815] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1955.764990] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1955.765178] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1955.765352] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1955.765524] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1955.765868] env[63024]: DEBUG nova.virt.hardware [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1955.767059] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbf410d5-ab71-456c-8ec7-83d9085abbd3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.790683] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] c28e7c21-7e7d-4cda-81e8-63538bd8a1f7/c28e7c21-7e7d-4cda-81e8-63538bd8a1f7.vmdk or device None with type sparse 
{{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1955.797554] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2784bd1f-45b3-40b0-80d3-6eb36fc60b0f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.822079] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb92a0c2-9ec8-45d3-9ecc-523a029b938f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.829215] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1955.829215] env[63024]: value = "task-1951419" [ 1955.829215] env[63024]: _type = "Task" [ 1955.829215] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1955.853721] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951419, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1955.901158] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.206s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1955.905193] env[63024]: DEBUG oslo_concurrency.lockutils [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.138s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1955.905544] env[63024]: DEBUG nova.objects.instance [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lazy-loading 'resources' on Instance uuid 669c45b0-34d6-45f8-a30e-b9b96cfd71ef {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1955.917904] env[63024]: DEBUG oslo_vmware.api [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951418, 'name': PowerOnVM_Task, 'duration_secs': 0.518493} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1955.918601] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1955.919045] env[63024]: INFO nova.compute.manager [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Took 9.26 seconds to spawn the instance on the hypervisor. [ 1955.919151] env[63024]: DEBUG nova.compute.manager [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1955.920635] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40cfffbe-ccb2-4011-b981-9863ef92b339 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1955.934345] env[63024]: INFO nova.scheduler.client.report [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleted allocations for instance b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4 [ 1955.935810] env[63024]: DEBUG nova.network.neutron [-] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1955.980081] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Releasing lock "refresh_cache-12e63b42-5554-44d5-86eb-d592bc0b2ad6" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1955.980556] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Instance network_info: |[{"id": "1bb2d3e3-4d49-4c71-86a2-d2210cb9f711", "address": "fa:16:3e:6e:04:00", "network": {"id": "44fbdd08-2faf-4ea7-b8c6-4f2045ca0856", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-573137423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "179b837229c642f1b495b236fd98e8eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1bb2d3e3-4d", "ovs_interfaceid": 
"1bb2d3e3-4d49-4c71-86a2-d2210cb9f711", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1955.981081] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6e:04:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf1b231-3660-4453-b4f3-44d825b9a5dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1bb2d3e3-4d49-4c71-86a2-d2210cb9f711', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1955.992151] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Creating folder: Project (179b837229c642f1b495b236fd98e8eb). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1955.994452] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a1446f3c-c20e-4965-ba1a-b029ac65cae5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.015073] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Created folder: Project (179b837229c642f1b495b236fd98e8eb) in parent group-v401959. [ 1956.015381] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Creating folder: Instances. Parent ref: group-v402195. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1956.016238] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5bf392b5-f2ba-40ea-bc45-bfba375f755f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.031922] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Created folder: Instances in parent group-v402195. [ 1956.032251] env[63024]: DEBUG oslo.service.loopingcall [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1956.032484] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1956.032717] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0b788129-19ab-4ad2-aae2-50c7ace250c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.055785] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1956.055785] env[63024]: value = "task-1951422" [ 1956.055785] env[63024]: _type = "Task" [ 1956.055785] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.065039] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951422, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.178440] env[63024]: DEBUG nova.network.neutron [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1956.343027] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951419, 'name': ReconfigVM_Task, 'duration_secs': 0.301011} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.343229] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Reconfigured VM instance instance-0000004d to attach disk [datastore1] c28e7c21-7e7d-4cda-81e8-63538bd8a1f7/c28e7c21-7e7d-4cda-81e8-63538bd8a1f7.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1956.345684] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'device_name': '/dev/sda', 'encryption_secret_uuid': None, 'encrypted': False, 'encryption_options': None, 'boot_index': 0, 'encryption_format': None, 'guest_format': None, 'device_type': 'disk', 'disk_bus': None, 'image_id': '2646ca61-612e-4bc3-97f7-ee492c048835'}], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'attachment_id': '743ad4b9-0d5e-416a-85f8-c3bd836b015a', 'boot_index': None, 'delete_on_termination': False, 'mount_device': '/dev/sdb', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402188', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'name': 'volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 
'c28e7c21-7e7d-4cda-81e8-63538bd8a1f7', 'attached_at': '', 'detached_at': '', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'serial': '2cfc46b0-10e9-4f4c-8f58-7fff36954695'}, 'device_type': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=63024) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1956.345684] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Volume attach. Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1956.345684] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402188', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'name': 'volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c28e7c21-7e7d-4cda-81e8-63538bd8a1f7', 'attached_at': '', 'detached_at': '', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'serial': '2cfc46b0-10e9-4f4c-8f58-7fff36954695'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1956.348025] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3562ad2-ae8f-4b72-9bf6-b18058bcf213 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.374774] env[63024]: DEBUG nova.network.neutron [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1956.375831] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9baea1f2-61e4-450c-b5d8-1f297c9c755c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.413157] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695/volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1956.418336] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b88bd01-f235-4c8a-91d0-c1e8e5f3fbb5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.438053] env[63024]: DEBUG nova.objects.instance [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lazy-loading 'flavor' on Instance uuid 
839776ef-0562-424d-b301-2aa896f32e14 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1956.444805] env[63024]: INFO nova.compute.manager [-] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Took 2.46 seconds to deallocate network for instance. [ 1956.452158] env[63024]: DEBUG oslo_concurrency.lockutils [None req-05bb1c70-3b7e-4b59-9d2a-2214d5bafd95 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.074s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.455057] env[63024]: INFO nova.compute.manager [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Took 28.37 seconds to build instance. [ 1956.457359] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1956.457359] env[63024]: value = "task-1951423" [ 1956.457359] env[63024]: _type = "Task" [ 1956.457359] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1956.469481] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951423, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.569293] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951422, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1956.769315] env[63024]: DEBUG nova.compute.manager [req-799f9eee-4502-40d8-8bdd-99e09dd01ee2 req-5c854656-465c-4e80-a890-96472eecfea5 service nova] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Received event network-vif-plugged-119df22d-0c91-429b-8927-6e0b9a7412f5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1956.769562] env[63024]: DEBUG oslo_concurrency.lockutils [req-799f9eee-4502-40d8-8bdd-99e09dd01ee2 req-5c854656-465c-4e80-a890-96472eecfea5 service nova] Acquiring lock "6d21976b-f519-4c87-a0d2-0a406060608d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.769792] env[63024]: DEBUG oslo_concurrency.lockutils [req-799f9eee-4502-40d8-8bdd-99e09dd01ee2 req-5c854656-465c-4e80-a890-96472eecfea5 service nova] Lock "6d21976b-f519-4c87-a0d2-0a406060608d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1956.770059] env[63024]: DEBUG oslo_concurrency.lockutils [req-799f9eee-4502-40d8-8bdd-99e09dd01ee2 req-5c854656-465c-4e80-a890-96472eecfea5 service nova] Lock "6d21976b-f519-4c87-a0d2-0a406060608d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.770240] env[63024]: DEBUG nova.compute.manager [req-799f9eee-4502-40d8-8bdd-99e09dd01ee2 req-5c854656-465c-4e80-a890-96472eecfea5 service nova] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] No waiting events found dispatching network-vif-plugged-119df22d-0c91-429b-8927-6e0b9a7412f5 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1956.770411] env[63024]: WARNING nova.compute.manager [req-799f9eee-4502-40d8-8bdd-99e09dd01ee2 req-5c854656-465c-4e80-a890-96472eecfea5 service nova] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Received unexpected event network-vif-plugged-119df22d-0c91-429b-8927-6e0b9a7412f5 for instance with vm_state building and task_state spawning. 
[ 1956.786193] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Successfully updated port: 119df22d-0c91-429b-8927-6e0b9a7412f5 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1956.880469] env[63024]: DEBUG oslo_concurrency.lockutils [req-dacc17c6-1ac5-4f82-8eeb-b33eaa362ff2 req-282fce54-7b7c-4daf-a9bc-2f65ace9aea6 service nova] Releasing lock "refresh_cache-c71abfaa-dc65-4d1b-8a34-dff9dd682fe7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1956.880770] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquired lock "refresh_cache-c71abfaa-dc65-4d1b-8a34-dff9dd682fe7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1956.880932] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1956.957246] env[63024]: DEBUG oslo_concurrency.lockutils [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquiring lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1956.957443] env[63024]: DEBUG oslo_concurrency.lockutils [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquired lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1956.962184] env[63024]: DEBUG oslo_concurrency.lockutils [None req-62dc5bb4-8f42-4631-a476-b3945ca69c3a tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "9e32eb32-6eff-4875-b4a3-adfab4647023" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.882s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1956.974021] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951423, 'name': ReconfigVM_Task, 'duration_secs': 0.351086} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1956.974021] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Reconfigured VM instance instance-0000004d to attach disk [datastore1] volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695/volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1956.977307] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b634f957-c997-4feb-b27d-3d18bce9159f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1956.990261] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1956.990390] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef9760b-12a9-4645-9973-904336a8fcdf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.004386] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a21d8a5-7a6a-4151-a609-e094950a5132 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.008811] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1957.008811] env[63024]: value = "task-1951424" [ 1957.008811] env[63024]: _type = "Task" [ 1957.008811] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.051976] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2283c866-b844-4947-89e4-98f191a433a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.057156] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951424, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.065116] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a880258-2f7e-4034-9c52-3eaf968ae09b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.076520] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951422, 'name': CreateVM_Task, 'duration_secs': 0.592685} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.084646] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1957.085197] env[63024]: DEBUG nova.compute.provider_tree [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1957.087783] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1957.087909] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1957.088194] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1957.088814] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90e6ba73-6564-4282-80af-4fc7270efed2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.095526] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1957.095526] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52217e9c-68ef-68ad-b495-0ea8033019ef" [ 1957.095526] env[63024]: _type = "Task" [ 1957.095526] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.105375] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52217e9c-68ef-68ad-b495-0ea8033019ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.146456] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Acquiring lock "51bdfe4a-2439-4ad5-97f3-f60c70c87b9d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.147329] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Lock "51bdfe4a-2439-4ad5-97f3-f60c70c87b9d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.289067] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "refresh_cache-6d21976b-f519-4c87-a0d2-0a406060608d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1957.289373] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquired lock "refresh_cache-6d21976b-f519-4c87-a0d2-0a406060608d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1957.289548] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1957.437643] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1957.442920] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.444445] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.444445] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1957.444445] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1957.444445] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1957.451198] env[63024]: INFO nova.compute.manager [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Terminating instance [ 1957.524144] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951424, 'name': ReconfigVM_Task, 'duration_secs': 0.169826} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.527705] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402188', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'name': 'volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c28e7c21-7e7d-4cda-81e8-63538bd8a1f7', 'attached_at': '', 'detached_at': '', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'serial': '2cfc46b0-10e9-4f4c-8f58-7fff36954695'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1957.528399] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e51b80d4-ae0a-49f6-a7b0-e4a56cafdf3a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.539997] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1957.539997] env[63024]: value = "task-1951425" [ 1957.539997] env[63024]: _type = "Task" [ 1957.539997] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.550214] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951425, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.590962] env[63024]: DEBUG nova.scheduler.client.report [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1957.622728] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52217e9c-68ef-68ad-b495-0ea8033019ef, 'name': SearchDatastore_Task, 'duration_secs': 0.011636} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1957.623380] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1957.623818] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1957.624478] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1957.625267] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1957.625267] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1957.625602] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b8fc774-3a31-4ce6-9cd9-8e010144fbdd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.637766] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1957.640305] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1957.640305] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-768e6b02-f02c-4fe2-8036-e8720db4c6e2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.646586] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1957.646586] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e687ad-ead0-a7fd-149a-0b415615dc73" [ 1957.646586] env[63024]: _type = "Task" [ 1957.646586] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.650249] env[63024]: DEBUG nova.compute.manager [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1957.660155] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e687ad-ead0-a7fd-149a-0b415615dc73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1957.730709] env[63024]: DEBUG nova.network.neutron [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1957.789602] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Updating instance_info_cache with network_info: [{"id": "71970f6a-12a4-4779-832b-7b00ff046697", "address": "fa:16:3e:60:84:8d", "network": {"id": "44fbdd08-2faf-4ea7-b8c6-4f2045ca0856", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-573137423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "179b837229c642f1b495b236fd98e8eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71970f6a-12", "ovs_interfaceid": "71970f6a-12a4-4779-832b-7b00ff046697", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1957.843990] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1957.958885] env[63024]: DEBUG nova.compute.manager [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1957.959196] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1957.960149] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c6fef6-1e56-45e2-86ba-4798143e820b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.968959] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1957.970430] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7a71fea-02c8-4eca-aa39-9476ed7c2e3f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1957.973320] env[63024]: DEBUG nova.compute.manager [req-6c9e7f18-57e5-41b7-98fe-f6282b530f49 req-c3cf860b-6c04-4520-a2b4-9e9c7a8b58a2 service nova] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Received event network-vif-deleted-989997b7-12bd-4924-97e2-a65914c47536 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1957.984561] env[63024]: DEBUG oslo_vmware.api [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1957.984561] env[63024]: value = "task-1951426" [ 1957.984561] env[63024]: _type = "Task" [ 1957.984561] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1957.994274] env[63024]: DEBUG oslo_vmware.api [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951426, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.055560] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951425, 'name': Rename_Task, 'duration_secs': 0.166443} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.056089] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1958.056822] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bb01d6fa-c8f9-4fb2-a0a3-d37efcc35c56 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.065782] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1958.065782] env[63024]: value = "task-1951427" [ 1958.065782] env[63024]: _type = "Task" [ 1958.065782] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.079677] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951427, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.103024] env[63024]: DEBUG oslo_concurrency.lockutils [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.197s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.105270] env[63024]: DEBUG oslo_concurrency.lockutils [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.829s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.105622] env[63024]: DEBUG nova.objects.instance [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lazy-loading 'resources' on Instance uuid 96afa44e-d8c6-419c-ae69-04b7b306c2c5 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1958.130904] env[63024]: INFO nova.scheduler.client.report [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Deleted allocations for instance 669c45b0-34d6-45f8-a30e-b9b96cfd71ef [ 1958.155943] env[63024]: DEBUG nova.network.neutron [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Updating instance_info_cache with network_info: [{"id": "119df22d-0c91-429b-8927-6e0b9a7412f5", "address": "fa:16:3e:df:47:fb", "network": {"id": "44fbdd08-2faf-4ea7-b8c6-4f2045ca0856", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-573137423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "179b837229c642f1b495b236fd98e8eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119df22d-0c", "ovs_interfaceid": "119df22d-0c91-429b-8927-6e0b9a7412f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.169891] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e687ad-ead0-a7fd-149a-0b415615dc73, 'name': SearchDatastore_Task, 'duration_secs': 0.014086} completed 
successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.176835] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fe5a5ec-86da-4043-8650-9b71b958f048 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.186338] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1958.186338] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5214cb85-1cb7-8016-2c4a-28c6b6aceab5" [ 1958.186338] env[63024]: _type = "Task" [ 1958.186338] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.190578] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.198057] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5214cb85-1cb7-8016-2c4a-28c6b6aceab5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.294592] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Releasing lock "refresh_cache-c71abfaa-dc65-4d1b-8a34-dff9dd682fe7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1958.295116] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Instance network_info: |[{"id": "71970f6a-12a4-4779-832b-7b00ff046697", "address": "fa:16:3e:60:84:8d", "network": {"id": "44fbdd08-2faf-4ea7-b8c6-4f2045ca0856", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-573137423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "179b837229c642f1b495b236fd98e8eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap71970f6a-12", "ovs_interfaceid": "71970f6a-12a4-4779-832b-7b00ff046697", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1958.295780] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:84:8d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf1b231-3660-4453-b4f3-44d825b9a5dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '71970f6a-12a4-4779-832b-7b00ff046697', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1958.309933] env[63024]: DEBUG oslo.service.loopingcall [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1958.310378] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1958.310730] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c975582-f056-4b24-af7c-2139b2bcc0e8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.349576] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1958.349576] env[63024]: value = "task-1951428" [ 1958.349576] env[63024]: _type = "Task" [ 1958.349576] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.359632] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951428, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.496566] env[63024]: DEBUG oslo_vmware.api [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951426, 'name': PowerOffVM_Task, 'duration_secs': 0.379794} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.497142] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1958.497457] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1958.497829] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-969e53ae-1117-4fa1-a080-12e3da8d83b1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.582395] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951427, 'name': PowerOnVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.642417] env[63024]: DEBUG oslo_concurrency.lockutils [None req-48e1ca48-b7bf-406a-9617-f8f25e1b770a tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "669c45b0-34d6-45f8-a30e-b9b96cfd71ef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.322s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1958.662068] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Releasing lock "refresh_cache-6d21976b-f519-4c87-a0d2-0a406060608d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1958.662068] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Instance network_info: |[{"id": "119df22d-0c91-429b-8927-6e0b9a7412f5", "address": "fa:16:3e:df:47:fb", "network": {"id": "44fbdd08-2faf-4ea7-b8c6-4f2045ca0856", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-573137423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "179b837229c642f1b495b236fd98e8eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119df22d-0c", 
"ovs_interfaceid": "119df22d-0c91-429b-8927-6e0b9a7412f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1958.662068] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:47:fb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aaf1b231-3660-4453-b4f3-44d825b9a5dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '119df22d-0c91-429b-8927-6e0b9a7412f5', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1958.671238] env[63024]: DEBUG oslo.service.loopingcall [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1958.675805] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1958.676986] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-92518275-9012-4826-9e95-de5880986463 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.706238] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.707189] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1958.707735] env[63024]: DEBUG nova.compute.manager [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Going to confirm migration 4 {{(pid=63024) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5113}} [ 1958.719095] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5214cb85-1cb7-8016-2c4a-28c6b6aceab5, 'name': SearchDatastore_Task, 'duration_secs': 0.013525} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1958.719704] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1958.719704] env[63024]: value = "task-1951430" [ 1958.719704] env[63024]: _type = "Task" [ 1958.719704] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.726022] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1958.726022] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 12e63b42-5554-44d5-86eb-d592bc0b2ad6/12e63b42-5554-44d5-86eb-d592bc0b2ad6.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1958.726022] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa33ae10-8bbc-4bb9-b57d-7151d6e44af8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.743832] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951430, 'name': CreateVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.750720] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1958.750720] env[63024]: value = "task-1951431" [ 1958.750720] env[63024]: _type = "Task" [ 1958.750720] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.760778] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951431, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.773698] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1958.773970] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1958.777054] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleting the datastore file [datastore1] 82b7019c-5049-4b8b-abb4-46f326ce3d5b {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1958.777054] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-193c426e-ec22-4415-bd55-7e50e2056dce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1958.784463] env[63024]: DEBUG oslo_vmware.api [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1958.784463] env[63024]: value = "task-1951432" [ 1958.784463] env[63024]: _type = "Task" [ 1958.784463] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1958.824916] env[63024]: DEBUG oslo_vmware.api [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951432, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.861150] env[63024]: DEBUG nova.network.neutron [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updating instance_info_cache with network_info: [{"id": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "address": "fa:16:3e:30:da:7e", "network": {"id": "c6d5dbfd-7cfb-4a2d-a8dd-088f4c0b9461", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085991502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45d36e5894294d9b875bb0c69c7c2a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c1b0a3-d3", "ovs_interfaceid": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1958.868406] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951428, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1958.917574] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "e9784dce-9a3f-4969-b48c-9c5b17959d88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1958.917840] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "e9784dce-9a3f-4969-b48c-9c5b17959d88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1959.086401] env[63024]: DEBUG oslo_vmware.api [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951427, 'name': PowerOnVM_Task, 'duration_secs': 0.57024} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.086691] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1959.086907] env[63024]: DEBUG nova.compute.manager [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1959.087870] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c17079c5-9a31-4166-97ba-0933270c2cc7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.150685] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0c7b127-efc2-482a-a989-c2244547a0b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.159421] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de29f4c0-2f2a-4616-b9c5-60f4ac830e72 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.197236] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24afede0-bf9d-4ada-983e-75cce443ca07 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.212788] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-363917a7-4740-4e1f-aa50-8608d82df513 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.233364] env[63024]: DEBUG nova.compute.provider_tree [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1959.250823] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951430, 'name': CreateVM_Task, 'duration_secs': 0.526518} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.254142] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1959.255238] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.255408] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.255743] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1959.256454] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-146d70ae-379b-405b-966b-cccfa206e975 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.263172] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951431, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.264676] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1959.264676] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526ceef9-4283-270b-a4ac-ad12c5238a00" [ 1959.264676] env[63024]: _type = "Task" [ 1959.264676] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.278755] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526ceef9-4283-270b-a4ac-ad12c5238a00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.305238] env[63024]: DEBUG oslo_vmware.api [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951432, 'name': DeleteDatastoreFile_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.311357] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.311597] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.311796] env[63024]: DEBUG nova.network.neutron [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1959.312039] env[63024]: DEBUG nova.objects.instance [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lazy-loading 'info_cache' on Instance uuid 9e8e7b6e-1bb2-4e66-b734-2f56e31302af {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1959.329399] env[63024]: DEBUG nova.compute.manager [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Received event network-changed-119df22d-0c91-429b-8927-6e0b9a7412f5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1959.329630] env[63024]: DEBUG nova.compute.manager [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Refreshing instance network info cache due to event network-changed-119df22d-0c91-429b-8927-6e0b9a7412f5. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1959.329831] env[63024]: DEBUG oslo_concurrency.lockutils [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] Acquiring lock "refresh_cache-6d21976b-f519-4c87-a0d2-0a406060608d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.330024] env[63024]: DEBUG oslo_concurrency.lockutils [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] Acquired lock "refresh_cache-6d21976b-f519-4c87-a0d2-0a406060608d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.330162] env[63024]: DEBUG nova.network.neutron [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Refreshing network info cache for port 119df22d-0c91-429b-8927-6e0b9a7412f5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1959.367818] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951428, 'name': CreateVM_Task, 'duration_secs': 0.629945} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.368014] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1959.368789] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.369263] env[63024]: DEBUG oslo_concurrency.lockutils [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Releasing lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1959.369472] env[63024]: DEBUG nova.compute.manager [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Inject network info {{(pid=63024) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7549}} [ 1959.369718] env[63024]: DEBUG nova.compute.manager [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] network_info to inject: |[{"id": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "address": "fa:16:3e:30:da:7e", "network": {"id": "c6d5dbfd-7cfb-4a2d-a8dd-088f4c0b9461", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085991502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45d36e5894294d9b875bb0c69c7c2a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c1b0a3-d3", "ovs_interfaceid": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7550}} [ 1959.374691] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Reconfiguring VM instance to set the machine id {{(pid=63024) 
_set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1959.374987] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cb73ef7c-f394-44e9-aea2-7f733288527c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.394304] env[63024]: DEBUG oslo_vmware.api [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for the task: (returnval){ [ 1959.394304] env[63024]: value = "task-1951433" [ 1959.394304] env[63024]: _type = "Task" [ 1959.394304] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.403815] env[63024]: DEBUG oslo_vmware.api [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1951433, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.424455] env[63024]: DEBUG nova.compute.manager [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1959.615259] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1959.741795] env[63024]: DEBUG nova.scheduler.client.report [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1959.762747] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951431, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.679268} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.763049] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 12e63b42-5554-44d5-86eb-d592bc0b2ad6/12e63b42-5554-44d5-86eb-d592bc0b2ad6.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1959.763281] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1959.763870] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2bfad9ca-8c41-4654-8674-dfa42107b8c7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.777605] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526ceef9-4283-270b-a4ac-ad12c5238a00, 'name': SearchDatastore_Task, 'duration_secs': 0.064107} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.778560] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1959.779653] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1959.779653] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.779653] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.779653] env[63024]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1959.780064] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1959.780064] env[63024]: value = "task-1951434" [ 1959.780064] env[63024]: _type = "Task" [ 1959.780064] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.780188] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1959.780463] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1959.780723] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a906bac2-65fe-4a94-a0ad-ba86a2795158 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.782724] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6403adb0-3ea4-4fd3-a880-da85a2054bf9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.795609] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951434, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.799233] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1959.799515] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1959.803585] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c9ea3d7-7f5a-489c-b71f-5229a9ca8640 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1959.806180] env[63024]: DEBUG oslo_vmware.api [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951432, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.523553} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.807090] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1959.807313] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1959.807546] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1959.808011] env[63024]: INFO nova.compute.manager [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Took 1.85 seconds to destroy the instance on the hypervisor. [ 1959.808011] env[63024]: DEBUG oslo.service.loopingcall [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1959.808305] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1959.808305] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5273e326-d18b-e4db-0b02-685d2c3e90c2" [ 1959.808305] env[63024]: _type = "Task" [ 1959.808305] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.808927] env[63024]: DEBUG nova.compute.manager [-] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1959.809066] env[63024]: DEBUG nova.network.neutron [-] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1959.818579] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1959.818579] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a1049f-310b-a3a2-3de7-90ec85f20a09" [ 1959.818579] env[63024]: _type = "Task" [ 1959.818579] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1959.843044] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a1049f-310b-a3a2-3de7-90ec85f20a09, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1959.843044] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5273e326-d18b-e4db-0b02-685d2c3e90c2, 'name': SearchDatastore_Task, 'duration_secs': 0.020137} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.843044] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1959.843350] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1959.844039] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1959.889763] env[63024]: DEBUG nova.objects.instance [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lazy-loading 'flavor' on Instance uuid 839776ef-0562-424d-b301-2aa896f32e14 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1959.906303] env[63024]: DEBUG oslo_vmware.api [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1951433, 'name': ReconfigVM_Task, 'duration_secs': 0.210382} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1959.906303] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-87c66ce0-4506-4997-9b5d-3dbfecd5acba tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Reconfigured VM instance to set the machine id {{(pid=63024) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1959.951688] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.102199] env[63024]: DEBUG nova.network.neutron [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Updated VIF entry in instance network info cache for port 119df22d-0c91-429b-8927-6e0b9a7412f5. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1960.102694] env[63024]: DEBUG nova.network.neutron [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Updating instance_info_cache with network_info: [{"id": "119df22d-0c91-429b-8927-6e0b9a7412f5", "address": "fa:16:3e:df:47:fb", "network": {"id": "44fbdd08-2faf-4ea7-b8c6-4f2045ca0856", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-573137423-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "179b837229c642f1b495b236fd98e8eb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aaf1b231-3660-4453-b4f3-44d825b9a5dd", "external-id": "nsx-vlan-transportzone-6", "segmentation_id": 6, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap119df22d-0c", "ovs_interfaceid": "119df22d-0c91-429b-8927-6e0b9a7412f5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.247762] env[63024]: DEBUG oslo_concurrency.lockutils [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.143s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.254634] env[63024]: DEBUG oslo_concurrency.lockutils [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.585s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.254634] env[63024]: DEBUG nova.objects.instance [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lazy-loading 'resources' on Instance uuid 6e0aa58b-85e0-4e74-812f-cc01041ed6d3 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1960.296544] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951434, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07977} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.296830] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1960.299058] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a80fe60-3c37-4910-b3b0-fe8af0cbe71f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.302510] env[63024]: INFO nova.scheduler.client.report [None req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Deleted allocations for instance 96afa44e-d8c6-419c-ae69-04b7b306c2c5 [ 1960.326911] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] 12e63b42-5554-44d5-86eb-d592bc0b2ad6/12e63b42-5554-44d5-86eb-d592bc0b2ad6.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1960.332062] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-478ef326-3384-4a7e-826d-8e3e3485dc97 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.359020] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a1049f-310b-a3a2-3de7-90ec85f20a09, 'name': SearchDatastore_Task, 'duration_secs': 0.020471} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.360857] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1960.360857] env[63024]: value = "task-1951435" [ 1960.360857] env[63024]: _type = "Task" [ 1960.360857] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.361293] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-98c814fd-3e1b-495d-b538-df43636f2274 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.370555] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1960.370555] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52117737-476e-b3b0-690e-01b5fbf6eea4" [ 1960.370555] env[63024]: _type = "Task" [ 1960.370555] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.373990] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951435, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.382686] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52117737-476e-b3b0-690e-01b5fbf6eea4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.394306] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquiring lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.394468] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquired lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.592107] env[63024]: DEBUG nova.compute.manager [req-14b5dd35-bead-4a93-9cdd-1487283a6be2 req-f921fd58-d663-4a08-ac1a-c745f11969dd service nova] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Received event network-vif-deleted-5dca7a03-21d2-431e-95cc-a6baa1929b65 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1960.592374] env[63024]: INFO nova.compute.manager [req-14b5dd35-bead-4a93-9cdd-1487283a6be2 req-f921fd58-d663-4a08-ac1a-c745f11969dd service nova] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Neutron deleted interface 5dca7a03-21d2-431e-95cc-a6baa1929b65; detaching it from the instance and deleting it from the info cache [ 1960.592584] env[63024]: DEBUG nova.network.neutron [req-14b5dd35-bead-4a93-9cdd-1487283a6be2 req-f921fd58-d663-4a08-ac1a-c745f11969dd service nova] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.597973] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1960.598293] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1960.606286] env[63024]: DEBUG oslo_concurrency.lockutils [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] Releasing lock "refresh_cache-6d21976b-f519-4c87-a0d2-0a406060608d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.606286] env[63024]: DEBUG nova.compute.manager [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Received event network-changed-c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1960.606286] env[63024]: DEBUG nova.compute.manager [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Refreshing instance network info cache due to event network-changed-c8c1b0a3-d31e-4600-b1be-f31f6b4b4071. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1960.606286] env[63024]: DEBUG oslo_concurrency.lockutils [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] Acquiring lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1960.729435] env[63024]: DEBUG nova.network.neutron [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance_info_cache with network_info: [{"id": "6aa34054-6865-4348-9871-fd32c747ab34", "address": "fa:16:3e:82:1f:7b", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6aa34054-68", "ovs_interfaceid": "6aa34054-6865-4348-9871-fd32c747ab34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.739875] env[63024]: DEBUG nova.network.neutron [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1960.833925] env[63024]: DEBUG oslo_concurrency.lockutils [None 
req-909f7370-194b-47ad-bdaf-14b2067cafeb tempest-MultipleCreateTestJSON-1467096459 tempest-MultipleCreateTestJSON-1467096459-project-member] Lock "96afa44e-d8c6-419c-ae69-04b7b306c2c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.765s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1960.846899] env[63024]: DEBUG nova.network.neutron [-] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1960.878220] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951435, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.890120] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52117737-476e-b3b0-690e-01b5fbf6eea4, 'name': SearchDatastore_Task, 'duration_secs': 0.018379} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1960.890332] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1960.890746] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 6d21976b-f519-4c87-a0d2-0a406060608d/6d21976b-f519-4c87-a0d2-0a406060608d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1960.890852] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1960.891226] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1960.891308] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc2cb817-8d98-4e87-873d-bbe6c1fe684d {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.893060] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3dc8aff2-3855-4460-80c3-b9da5c9475cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.903947] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1960.903947] env[63024]: value = "task-1951436" [ 1960.903947] env[63024]: _type = "Task" [ 1960.903947] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.908106] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1960.908304] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1960.911738] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4a5e764-98db-4fb1-9f8b-4c8e5e567d35 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1960.918287] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951436, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1960.922020] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1960.922020] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527f26da-5e7d-3338-6ed9-13ca47d29d20" [ 1960.922020] env[63024]: _type = "Task" [ 1960.922020] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1960.928907] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527f26da-5e7d-3338-6ed9-13ca47d29d20, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.095482] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9fb14533-7431-46f3-b978-d229daf7388b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.101787] env[63024]: INFO nova.compute.manager [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Detaching volume ded8b478-8973-478c-b264-5807871774d2 [ 1961.107921] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91697ef3-c84a-4eec-878c-9c7609c4b9f2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.157702] env[63024]: DEBUG nova.compute.manager [req-14b5dd35-bead-4a93-9cdd-1487283a6be2 req-f921fd58-d663-4a08-ac1a-c745f11969dd service nova] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Detach interface failed, port_id=5dca7a03-21d2-431e-95cc-a6baa1929b65, reason: Instance 82b7019c-5049-4b8b-abb4-46f326ce3d5b could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1961.162122] env[63024]: INFO nova.virt.block_device [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Attempting to driver detach volume ded8b478-8973-478c-b264-5807871774d2 from mountpoint /dev/sdb [ 1961.162856] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1961.162856] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402169', 'volume_id': 'ded8b478-8973-478c-b264-5807871774d2', 'name': 'volume-ded8b478-8973-478c-b264-5807871774d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '73db94b8-cfa8-4457-bccb-d4b780edbd93', 'attached_at': '', 'detached_at': '', 'volume_id': 'ded8b478-8973-478c-b264-5807871774d2', 'serial': 'ded8b478-8973-478c-b264-5807871774d2'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1961.165024] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d5a7eb-713a-4584-ac97-50264d922894 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.188990] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e7a74b5-20cf-4d7b-ba1c-60a60c5a60bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.197732] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee5b9adb-7e91-47c3-a32d-058b91f6785c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.223486] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca49bfe2-1363-463d-918d-bf6676d8ec60 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.240204] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "refresh_cache-9e8e7b6e-1bb2-4e66-b734-2f56e31302af" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.244030] env[63024]: DEBUG nova.objects.instance [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lazy-loading 'migration_context' on Instance uuid 9e8e7b6e-1bb2-4e66-b734-2f56e31302af {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1961.244030] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] The volume has not been displaced from its original location: [datastore1] volume-ded8b478-8973-478c-b264-5807871774d2/volume-ded8b478-8973-478c-b264-5807871774d2.vmdk. No consolidation needed. 
{{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1961.246981] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Reconfiguring VM instance instance-00000043 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1961.252239] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b36fadf1-6321-416b-8e13-1f06ca82caba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.277126] env[63024]: DEBUG oslo_vmware.api [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1961.277126] env[63024]: value = "task-1951437" [ 1961.277126] env[63024]: _type = "Task" [ 1961.277126] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.289267] env[63024]: DEBUG oslo_vmware.api [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951437, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.293160] env[63024]: DEBUG oslo_concurrency.lockutils [None req-129a35d2-41a8-4461-b277-4fa1c4724e6e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "fe6847e2-a742-4338-983f-698c13aaefde" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.293160] env[63024]: DEBUG oslo_concurrency.lockutils [None req-129a35d2-41a8-4461-b277-4fa1c4724e6e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "fe6847e2-a742-4338-983f-698c13aaefde" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1961.293588] env[63024]: DEBUG nova.compute.manager [None req-129a35d2-41a8-4461-b277-4fa1c4724e6e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1961.296932] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ef82fe-8434-482c-ac6f-8d336ece3a36 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.305778] env[63024]: DEBUG nova.compute.manager [None req-129a35d2-41a8-4461-b277-4fa1c4724e6e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63024) do_stop_instance 
/opt/stack/nova/nova/compute/manager.py:3404}} [ 1961.306502] env[63024]: DEBUG nova.objects.instance [None req-129a35d2-41a8-4461-b277-4fa1c4724e6e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'flavor' on Instance uuid fe6847e2-a742-4338-983f-698c13aaefde {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1961.316033] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae02ed82-3a84-431b-8633-71fbd969f08c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.334548] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16addc28-30a6-48e4-b644-1dc6138ac366 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.370125] env[63024]: INFO nova.compute.manager [-] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Took 1.56 seconds to deallocate network for instance. [ 1961.376165] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd2b66a-68b6-4198-9d23-a34463e2d191 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.395315] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e049f6-4b60-41a0-a503-96360c1b93d1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.400092] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951435, 'name': ReconfigVM_Task, 'duration_secs': 0.903918} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.400471] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Reconfigured VM instance instance-00000057 to attach disk [datastore1] 12e63b42-5554-44d5-86eb-d592bc0b2ad6/12e63b42-5554-44d5-86eb-d592bc0b2ad6.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1961.401565] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6dabce0-b7fd-4900-8a1d-c088a5276310 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.414236] env[63024]: DEBUG nova.compute.provider_tree [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1961.422968] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1961.422968] env[63024]: value = "task-1951438" [ 1961.422968] env[63024]: _type = "Task" [ 1961.422968] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.444741] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951436, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.456938] env[63024]: DEBUG nova.compute.manager [req-cfabd33c-1e1b-4a3e-bfa1-dca4af91c908 req-9ef6524b-d8e3-4994-a3f5-92a2b471f300 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Received event network-changed-c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1961.457241] env[63024]: DEBUG nova.compute.manager [req-cfabd33c-1e1b-4a3e-bfa1-dca4af91c908 req-9ef6524b-d8e3-4994-a3f5-92a2b471f300 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Refreshing instance network info cache due to event network-changed-c8c1b0a3-d31e-4600-b1be-f31f6b4b4071. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1961.457501] env[63024]: DEBUG oslo_concurrency.lockutils [req-cfabd33c-1e1b-4a3e-bfa1-dca4af91c908 req-9ef6524b-d8e3-4994-a3f5-92a2b471f300 service nova] Acquiring lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1961.458468] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951438, 'name': Rename_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.458674] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527f26da-5e7d-3338-6ed9-13ca47d29d20, 'name': SearchDatastore_Task, 'duration_secs': 0.025267} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.459551] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-65d5ab5a-744c-4416-af21-a4e7c0209676 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.474062] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1961.474062] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f85922-100e-88b1-fcd5-cfc6517d0b7e" [ 1961.474062] env[63024]: _type = "Task" [ 1961.474062] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.488041] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f85922-100e-88b1-fcd5-cfc6517d0b7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.610293] env[63024]: DEBUG nova.network.neutron [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updating instance_info_cache with network_info: [{"id": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "address": "fa:16:3e:30:da:7e", "network": {"id": "c6d5dbfd-7cfb-4a2d-a8dd-088f4c0b9461", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085991502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45d36e5894294d9b875bb0c69c7c2a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c1b0a3-d3", "ovs_interfaceid": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1961.751890] env[63024]: 
DEBUG nova.objects.base [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Object Instance<9e8e7b6e-1bb2-4e66-b734-2f56e31302af> lazy-loaded attributes: info_cache,migration_context {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1961.753129] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9414880-f681-4037-9ecc-72d8ab290283 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.776027] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b347c885-44d4-4529-bd1e-ebe22b1d029d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.793055] env[63024]: DEBUG oslo_vmware.api [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951437, 'name': ReconfigVM_Task, 'duration_secs': 0.320332} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.793346] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Reconfigured VM instance instance-00000043 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1961.801455] env[63024]: DEBUG oslo_vmware.api [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1961.801455] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c2fe37-7e04-8bec-0bbd-28584d482f09" [ 1961.801455] env[63024]: _type = "Task" [ 1961.801455] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.801756] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6b3f5638-b331-49df-b5dd-7b72a45c1ab7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.837031] env[63024]: DEBUG oslo_vmware.api [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c2fe37-7e04-8bec-0bbd-28584d482f09, 'name': SearchDatastore_Task, 'duration_secs': 0.009527} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.839019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.839742] env[63024]: DEBUG oslo_vmware.api [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1961.839742] env[63024]: value = "task-1951439" [ 1961.839742] env[63024]: _type = "Task" [ 1961.839742] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.854130] env[63024]: DEBUG oslo_vmware.api [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951439, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.886624] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1961.925510] env[63024]: DEBUG nova.scheduler.client.report [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1961.929258] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951436, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.765275} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.933057] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 6d21976b-f519-4c87-a0d2-0a406060608d/6d21976b-f519-4c87-a0d2-0a406060608d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1961.933309] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1961.933819] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-81f68b16-fdfc-449c-ba3e-91c2eaf8340c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.942643] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951438, 'name': Rename_Task, 'duration_secs': 0.434961} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.944115] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1961.944495] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1961.944495] env[63024]: value = "task-1951440" [ 1961.944495] env[63024]: _type = "Task" [ 1961.944495] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.944690] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2ff1c7bc-091a-4445-842f-5db570d3afa2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.956685] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951440, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.958976] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1961.958976] env[63024]: value = "task-1951441" [ 1961.958976] env[63024]: _type = "Task" [ 1961.958976] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1961.967647] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951441, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1961.987940] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f85922-100e-88b1-fcd5-cfc6517d0b7e, 'name': SearchDatastore_Task, 'duration_secs': 0.063542} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1961.988318] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1961.988593] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c71abfaa-dc65-4d1b-8a34-dff9dd682fe7/c71abfaa-dc65-4d1b-8a34-dff9dd682fe7.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1961.988860] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0da753b3-8e1d-4b7e-9160-beb0e8231324 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1961.997960] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1961.997960] env[63024]: value = "task-1951442" [ 1961.997960] env[63024]: _type = "Task" [ 1961.997960] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.008020] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951442, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.114021] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Releasing lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.114306] env[63024]: DEBUG nova.compute.manager [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Inject network info {{(pid=63024) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7549}} [ 1962.114572] env[63024]: DEBUG nova.compute.manager [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] network_info to inject: |[{"id": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "address": "fa:16:3e:30:da:7e", "network": {"id": "c6d5dbfd-7cfb-4a2d-a8dd-088f4c0b9461", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085991502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45d36e5894294d9b875bb0c69c7c2a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c1b0a3-d3", "ovs_interfaceid": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7550}} [ 1962.119488] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Reconfiguring VM instance to set the machine id {{(pid=63024) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1962.119825] env[63024]: DEBUG oslo_concurrency.lockutils [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] Acquired lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1962.120053] env[63024]: DEBUG nova.network.neutron [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Refreshing network info cache for port c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 1962.121228] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-42749016-3acd-4eb5-a679-f2bf6d3f76bc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.143150] env[63024]: DEBUG oslo_vmware.api [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for the task: (returnval){ [ 1962.143150] env[63024]: value = "task-1951443" [ 1962.143150] env[63024]: _type = "Task" [ 1962.143150] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.152826] env[63024]: DEBUG oslo_vmware.api [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1951443, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.330161] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-129a35d2-41a8-4461-b277-4fa1c4724e6e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1962.331149] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-92a45434-0137-49aa-8c50-e0f392055949 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.345058] env[63024]: DEBUG oslo_vmware.api [None req-129a35d2-41a8-4461-b277-4fa1c4724e6e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1962.345058] env[63024]: value = "task-1951444" [ 1962.345058] env[63024]: _type = "Task" [ 1962.345058] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.360683] env[63024]: DEBUG oslo_concurrency.lockutils [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1962.361047] env[63024]: DEBUG oslo_concurrency.lockutils [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.361160] env[63024]: INFO nova.compute.manager [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Shelving [ 1962.362992] env[63024]: DEBUG oslo_vmware.api [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951439, 'name': ReconfigVM_Task, 'duration_secs': 0.250363} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.371288] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402169', 'volume_id': 'ded8b478-8973-478c-b264-5807871774d2', 'name': 'volume-ded8b478-8973-478c-b264-5807871774d2', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '73db94b8-cfa8-4457-bccb-d4b780edbd93', 'attached_at': '', 'detached_at': '', 'volume_id': 'ded8b478-8973-478c-b264-5807871774d2', 'serial': 'ded8b478-8973-478c-b264-5807871774d2'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1962.372239] env[63024]: DEBUG oslo_vmware.api [None req-129a35d2-41a8-4461-b277-4fa1c4724e6e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951444, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.418582] env[63024]: DEBUG nova.network.neutron [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updated VIF entry in instance network info cache for port c8c1b0a3-d31e-4600-b1be-f31f6b4b4071. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1962.421022] env[63024]: DEBUG nova.network.neutron [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updating instance_info_cache with network_info: [{"id": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "address": "fa:16:3e:30:da:7e", "network": {"id": "c6d5dbfd-7cfb-4a2d-a8dd-088f4c0b9461", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085991502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45d36e5894294d9b875bb0c69c7c2a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c1b0a3-d3", "ovs_interfaceid": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1962.436327] env[63024]: DEBUG oslo_concurrency.lockutils [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.186s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1962.439688] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.406s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1962.440071] env[63024]: DEBUG nova.objects.instance [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lazy-loading 'resources' on Instance uuid 77c27741-ee3a-4a8b-bbd3-89759288f7c6 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1962.464895] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951440, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.130056} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.469296] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1962.470588] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38062045-1e14-4b85-a428-927817ebcd0e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.481415] env[63024]: INFO nova.scheduler.client.report [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Deleted allocations for instance 6e0aa58b-85e0-4e74-812f-cc01041ed6d3 [ 1962.505656] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] 6d21976b-f519-4c87-a0d2-0a406060608d/6d21976b-f519-4c87-a0d2-0a406060608d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1962.512921] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21cf3f1e-6b6b-4dab-9d16-c0e41cd07358 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.532491] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951441, 'name': PowerOnVM_Task} progress is 87%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.551094] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951442, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.553559] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1962.553559] env[63024]: value = "task-1951445" [ 1962.553559] env[63024]: _type = "Task" [ 1962.553559] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1962.571324] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951445, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1962.655995] env[63024]: DEBUG oslo_vmware.api [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1951443, 'name': ReconfigVM_Task, 'duration_secs': 0.238881} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.655995] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5a17aef6-77eb-406b-b582-88488b3365a2 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Reconfigured VM instance to set the machine id {{(pid=63024) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1962.859970] env[63024]: DEBUG oslo_vmware.api [None req-129a35d2-41a8-4461-b277-4fa1c4724e6e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951444, 'name': PowerOffVM_Task, 'duration_secs': 0.454661} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1962.860285] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-129a35d2-41a8-4461-b277-4fa1c4724e6e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1962.860760] env[63024]: DEBUG nova.compute.manager [None req-129a35d2-41a8-4461-b277-4fa1c4724e6e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1962.861284] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0359c6c9-b3bb-46f7-b8c5-d36d5381987b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1962.922209] env[63024]: DEBUG oslo_concurrency.lockutils [req-9d437dcc-f22c-4af2-ab46-67d978002a60 req-e4f2b448-120f-4a7f-837f-2dd085e58351 service nova] Releasing lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1962.924150] env[63024]: DEBUG nova.objects.instance [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'flavor' on Instance uuid 73db94b8-cfa8-4457-bccb-d4b780edbd93 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1962.926049] env[63024]: DEBUG oslo_concurrency.lockutils [req-cfabd33c-1e1b-4a3e-bfa1-dca4af91c908 req-9ef6524b-d8e3-4994-a3f5-92a2b471f300 service nova] Acquired lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1962.926049] env[63024]: DEBUG nova.network.neutron [req-cfabd33c-1e1b-4a3e-bfa1-dca4af91c908 req-9ef6524b-d8e3-4994-a3f5-92a2b471f300 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Refreshing network info cache for port 
c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1962.975103] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951441, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.026187] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951442, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.685726} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.026473] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c71abfaa-dc65-4d1b-8a34-dff9dd682fe7/c71abfaa-dc65-4d1b-8a34-dff9dd682fe7.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1963.026696] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1963.026958] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8d42c92-1912-409f-9484-39f13d4f5d0f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.035904] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1963.035904] env[63024]: value = "task-1951446" [ 1963.035904] env[63024]: _type = "Task" [ 1963.035904] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.037202] env[63024]: DEBUG oslo_concurrency.lockutils [None req-71fd91f5-f16c-47ad-b8c3-fecbda613c32 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "6e0aa58b-85e0-4e74-812f-cc01041ed6d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.086s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.040712] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquiring lock "839776ef-0562-424d-b301-2aa896f32e14" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.041138] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lock "839776ef-0562-424d-b301-2aa896f32e14" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.041138] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquiring lock "839776ef-0562-424d-b301-2aa896f32e14-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.041338] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lock "839776ef-0562-424d-b301-2aa896f32e14-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.041484] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lock "839776ef-0562-424d-b301-2aa896f32e14-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.044699] env[63024]: INFO nova.compute.manager [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Terminating instance [ 1963.052165] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951446, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.067626] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951445, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.382141] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1963.382843] env[63024]: DEBUG oslo_concurrency.lockutils [None req-129a35d2-41a8-4461-b277-4fa1c4724e6e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "fe6847e2-a742-4338-983f-698c13aaefde" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.089s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.384205] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a507c87-4276-4025-8cc2-0731b91559e0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.396407] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1963.396407] env[63024]: value = "task-1951447" [ 1963.396407] env[63024]: _type = "Task" [ 1963.396407] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.410658] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951447, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.412726] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d986932-d1f6-45e8-b10a-18030941c60e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.421395] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d85848-3f46-478e-9c9f-cca2098ae996 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.463645] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-394b53b3-4c6a-4b06-be65-017feaff5029 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.475992] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951441, 'name': PowerOnVM_Task, 'duration_secs': 1.053178} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.478601] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1963.478838] env[63024]: INFO nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Took 13.09 seconds to spawn the instance on the hypervisor. [ 1963.479035] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1963.480563] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5509d672-7c2e-41ec-9651-673853fd4699 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.483987] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a312fe-2f07-435b-999f-95acf8a275f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.501729] env[63024]: DEBUG nova.compute.provider_tree [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1963.551726] env[63024]: DEBUG nova.compute.manager [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1963.552112] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1963.552567] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951446, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.297615} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.553762] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba595e8-d406-43fe-ab05-0deec310bb11 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.561257] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1963.561257] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56246d3e-42a3-4cf9-b2a0-b46811497614 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.592074] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1963.593437] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951445, 'name': ReconfigVM_Task, 'duration_secs': 0.55369} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.601134] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Reconfiguring VM instance instance-00000058 to attach disk [datastore1] c71abfaa-dc65-4d1b-8a34-dff9dd682fe7/c71abfaa-dc65-4d1b-8a34-dff9dd682fe7.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1963.604029] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57a71fe1-beaf-4e6a-8bda-2b5c22b90754 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.605738] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Reconfigured VM instance instance-00000059 to attach disk [datastore1] 6d21976b-f519-4c87-a0d2-0a406060608d/6d21976b-f519-4c87-a0d2-0a406060608d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1963.607031] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b9b9dc0-dcd7-4779-a98b-470e5cec5638 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.621550] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with 
opID=oslo.vmware-911aa310-c366-4ef2-8b6b-43c475099ece {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.630229] env[63024]: DEBUG oslo_vmware.api [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for the task: (returnval){ [ 1963.630229] env[63024]: value = "task-1951448" [ 1963.630229] env[63024]: _type = "Task" [ 1963.630229] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.632609] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1963.632609] env[63024]: value = "task-1951450" [ 1963.632609] env[63024]: _type = "Task" [ 1963.632609] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.632868] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1963.632868] env[63024]: value = "task-1951449" [ 1963.632868] env[63024]: _type = "Task" [ 1963.632868] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1963.648329] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951450, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.655202] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951449, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.655492] env[63024]: DEBUG oslo_vmware.api [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1951448, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1963.857932] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1544b179-63e2-4de2-8b24-b923fba480dc tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1963.915245] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951447, 'name': PowerOffVM_Task, 'duration_secs': 0.320457} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1963.915245] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1963.916141] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed93ef4-0243-43f6-bd0d-01a76a2cdee1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.942915] env[63024]: DEBUG nova.network.neutron [req-cfabd33c-1e1b-4a3e-bfa1-dca4af91c908 req-9ef6524b-d8e3-4994-a3f5-92a2b471f300 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updated VIF entry in instance network info cache for port c8c1b0a3-d31e-4600-b1be-f31f6b4b4071. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1963.943344] env[63024]: DEBUG nova.network.neutron [req-cfabd33c-1e1b-4a3e-bfa1-dca4af91c908 req-9ef6524b-d8e3-4994-a3f5-92a2b471f300 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updating instance_info_cache with network_info: [{"id": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "address": "fa:16:3e:30:da:7e", "network": {"id": "c6d5dbfd-7cfb-4a2d-a8dd-088f4c0b9461", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-2085991502-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.194", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "45d36e5894294d9b875bb0c69c7c2a7c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "db68bd64-5b56-49af-a075-13dcf85cb2e0", "external-id": "nsx-vlan-transportzone-590", "segmentation_id": 590, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc8c1b0a3-d3", "ovs_interfaceid": "c8c1b0a3-d31e-4600-b1be-f31f6b4b4071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1963.947806] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc8e6f2-64b8-440a-bf3d-281a40348a23 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.963502] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3adaf09d-878d-4dee-8b48-f1ae73eb61fd tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.365s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1963.966527] env[63024]: DEBUG oslo_concurrency.lockutils [None 
req-1544b179-63e2-4de2-8b24-b923fba480dc tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.107s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1963.966527] env[63024]: DEBUG nova.compute.manager [None req-1544b179-63e2-4de2-8b24-b923fba480dc tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1963.966527] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c466ba0b-1d1d-476a-a45b-3900558077cf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1963.977117] env[63024]: DEBUG nova.compute.manager [None req-1544b179-63e2-4de2-8b24-b923fba480dc tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63024) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1963.977117] env[63024]: DEBUG nova.objects.instance [None req-1544b179-63e2-4de2-8b24-b923fba480dc tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'flavor' on Instance uuid 73db94b8-cfa8-4457-bccb-d4b780edbd93 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1964.007536] env[63024]: DEBUG nova.scheduler.client.report [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1964.021028] env[63024]: INFO nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Took 25.87 seconds to build instance. [ 1964.152812] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951450, 'name': ReconfigVM_Task, 'duration_secs': 0.514184} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.160683] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Reconfigured VM instance instance-00000058 to attach disk [datastore1] c71abfaa-dc65-4d1b-8a34-dff9dd682fe7/c71abfaa-dc65-4d1b-8a34-dff9dd682fe7.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1964.161586] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951449, 'name': Rename_Task, 'duration_secs': 0.199839} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.161881] env[63024]: DEBUG oslo_vmware.api [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1951448, 'name': PowerOffVM_Task, 'duration_secs': 0.250225} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.162152] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4947d79e-98a7-4bb8-8493-9fcff2678338 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.164393] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1964.164731] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1964.164958] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1964.165657] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6b291988-cdb6-42c9-8650-d049e9a8d1be {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.167604] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ff5b73a3-b369-4052-8537-4054285abaa1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.175878] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 
tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1964.175878] env[63024]: value = "task-1951451" [ 1964.175878] env[63024]: _type = "Task" [ 1964.175878] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.176834] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1964.176834] env[63024]: value = "task-1951452" [ 1964.176834] env[63024]: _type = "Task" [ 1964.176834] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.190677] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951451, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.194413] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951452, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.451847] env[63024]: DEBUG oslo_concurrency.lockutils [req-cfabd33c-1e1b-4a3e-bfa1-dca4af91c908 req-9ef6524b-d8e3-4994-a3f5-92a2b471f300 service nova] Releasing lock "refresh_cache-839776ef-0562-424d-b301-2aa896f32e14" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1964.455994] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Acquiring lock "ec1f30e6-8410-4687-958f-f4e6e154b52f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1964.456245] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Lock "ec1f30e6-8410-4687-958f-f4e6e154b52f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.460841] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1964.461152] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-bbdf9f55-b66b-459e-acef-ef2bf707a5fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.470903] env[63024]: DEBUG oslo_vmware.api [None 
req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1964.470903] env[63024]: value = "task-1951454" [ 1964.470903] env[63024]: _type = "Task" [ 1964.470903] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.484757] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951454, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.520718] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.082s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.523495] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.335s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1964.523774] env[63024]: DEBUG nova.objects.instance [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Lazy-loading 'resources' on Instance uuid 601a003d-811c-4698-b0b6-054482d32c21 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1964.526059] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "12e63b42-5554-44d5-86eb-d592bc0b2ad6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.388s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1964.553739] env[63024]: INFO nova.scheduler.client.report [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Deleted allocations for instance 77c27741-ee3a-4a8b-bbd3-89759288f7c6 [ 1964.688698] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951451, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.694429] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951452, 'name': Rename_Task, 'duration_secs': 0.246344} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1964.694707] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1964.694953] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-97902562-c43e-4240-a85b-4bc2a3589bd1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.702884] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1964.702884] env[63024]: value = "task-1951455" [ 1964.702884] env[63024]: _type = "Task" [ 1964.702884] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1964.720574] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951455, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.828935] env[63024]: DEBUG nova.compute.manager [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Stashing vm_state: stopped {{(pid=63024) _prep_resize /opt/stack/nova/nova/compute/manager.py:5954}} [ 1964.958765] env[63024]: DEBUG nova.compute.manager [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1964.982441] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951454, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1964.988442] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1544b179-63e2-4de2-8b24-b923fba480dc tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1964.988657] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a2299eb1-3cae-4d64-a1fa-f60a7d45ee0a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1964.996068] env[63024]: DEBUG oslo_vmware.api [None req-1544b179-63e2-4de2-8b24-b923fba480dc tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1964.996068] env[63024]: value = "task-1951456" [ 1964.996068] env[63024]: _type = "Task" [ 1964.996068] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1965.006607] env[63024]: DEBUG oslo_vmware.api [None req-1544b179-63e2-4de2-8b24-b923fba480dc tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951456, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.063993] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9cd6bf9b-c70f-423e-ad10-787b9b139939 tempest-SecurityGroupsTestJSON-421642354 tempest-SecurityGroupsTestJSON-421642354-project-member] Lock "77c27741-ee3a-4a8b-bbd3-89759288f7c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.623s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1965.105644] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "da1f5cbc-47bf-4ee4-837a-b328de170489" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.105913] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "da1f5cbc-47bf-4ee4-837a-b328de170489" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1965.189116] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951451, 'name': PowerOnVM_Task, 'duration_secs': 0.583868} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.190362] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1965.190588] env[63024]: INFO nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Took 9.46 seconds to spawn the instance on the hypervisor. [ 1965.190804] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1965.192706] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1499f52a-9495-478c-9795-2aafdd43df99 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.217776] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951455, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1965.345463] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.448359] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83649086-e6c0-47eb-bbe5-11ae81ca7c3c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.457850] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f77da64-4024-42af-ac82-110ad9ca061f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.497267] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ca262ef-8bc8-480b-8760-75c4837b14ed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.509626] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951454, 'name': CreateSnapshot_Task, 'duration_secs': 0.783417} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.510620] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1965.513107] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1965.513965] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f694e7b8-ef1b-401b-a523-97e033e9c037 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.517452] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ccc2af9-f8dd-44f5-bc7a-4381758e9e9c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.524015] env[63024]: DEBUG oslo_vmware.api [None req-1544b179-63e2-4de2-8b24-b923fba480dc tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951456, 'name': PowerOffVM_Task, 'duration_secs': 0.237653} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.525808] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1544b179-63e2-4de2-8b24-b923fba480dc tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1965.525808] env[63024]: DEBUG nova.compute.manager [None req-1544b179-63e2-4de2-8b24-b923fba480dc tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1965.525932] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c4f1a8-06d7-440d-ab74-2e608e80e701 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.542262] env[63024]: DEBUG nova.compute.provider_tree [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1965.608103] env[63024]: DEBUG nova.compute.manager [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1965.719057] env[63024]: DEBUG oslo_vmware.api [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951455, 'name': PowerOnVM_Task, 'duration_secs': 0.622321} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1965.719780] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1965.719780] env[63024]: INFO nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Took 12.71 seconds to spawn the instance on the hypervisor. [ 1965.719983] env[63024]: DEBUG nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1965.722495] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32f8c31-ef75-41b6-986a-4584f52b7c45 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1965.725296] env[63024]: INFO nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Took 27.50 seconds to build instance. 
[ 1966.053452] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1966.054674] env[63024]: DEBUG nova.scheduler.client.report [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1966.063990] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-51a5b41e-7490-421c-bbf5-9b7ca45d1d2b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.072010] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1544b179-63e2-4de2-8b24-b923fba480dc tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.107s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.081944] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1966.081944] env[63024]: value = "task-1951457" [ 1966.081944] env[63024]: _type = "Task" [ 1966.081944] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.095527] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951457, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.132186] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1966.228567] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "6d21976b-f519-4c87-a0d2-0a406060608d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.011s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.228567] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1966.228567] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1966.228950] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Deleting the datastore file [datastore1] 839776ef-0562-424d-b301-2aa896f32e14 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1966.231823] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-34ac6dda-c630-4b35-a478-c440127f14eb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1966.243609] env[63024]: DEBUG oslo_vmware.api [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for the task: (returnval){ [ 1966.243609] env[63024]: value = "task-1951458" [ 1966.243609] env[63024]: _type = "Task" [ 1966.243609] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1966.247194] env[63024]: INFO nova.compute.manager [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Took 28.06 seconds to build instance. [ 1966.264602] env[63024]: DEBUG oslo_vmware.api [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1951458, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.337435] env[63024]: DEBUG nova.objects.instance [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'flavor' on Instance uuid 73db94b8-cfa8-4457-bccb-d4b780edbd93 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1966.573467] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.050s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.576017] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.971s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1966.577610] env[63024]: INFO nova.compute.claims [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1966.595908] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951457, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1966.605310] env[63024]: INFO nova.scheduler.client.report [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Deleted allocations for instance 601a003d-811c-4698-b0b6-054482d32c21 [ 1966.749879] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21381bff-c752-44f7-a1dd-5ccc5bbb3be5 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "c71abfaa-dc65-4d1b-8a34-dff9dd682fe7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.571s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1966.761936] env[63024]: DEBUG oslo_vmware.api [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Task: {'id': task-1951458, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.254503} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1966.762580] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1966.762931] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1966.763278] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1966.765101] env[63024]: INFO nova.compute.manager [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Took 3.21 seconds to destroy the instance on the hypervisor. [ 1966.765101] env[63024]: DEBUG oslo.service.loopingcall [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1966.765101] env[63024]: DEBUG nova.compute.manager [-] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1966.765101] env[63024]: DEBUG nova.network.neutron [-] [instance: 839776ef-0562-424d-b301-2aa896f32e14] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1966.845151] env[63024]: DEBUG oslo_concurrency.lockutils [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1966.845151] env[63024]: DEBUG oslo_concurrency.lockutils [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquired lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1966.845151] env[63024]: DEBUG nova.network.neutron [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1966.845151] env[63024]: DEBUG nova.objects.instance [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'info_cache' on Instance uuid 73db94b8-cfa8-4457-bccb-d4b780edbd93 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1967.100565] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951457, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.113170] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb8c2cb4-713a-428e-afd2-0deded9110ae tempest-ServersTestBootFromVolume-2044198158 tempest-ServersTestBootFromVolume-2044198158-project-member] Lock "601a003d-811c-4698-b0b6-054482d32c21" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.673s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.349293] env[63024]: DEBUG nova.objects.base [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Object Instance<73db94b8-cfa8-4457-bccb-d4b780edbd93> lazy-loaded attributes: flavor,info_cache {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1967.409754] env[63024]: DEBUG nova.compute.manager [req-c1c371b1-aaa4-40af-bcf1-fe5509cf60b4 req-31f0409b-5893-4a2e-aaa1-9caa1e423df0 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Received event network-vif-deleted-c8c1b0a3-d31e-4600-b1be-f31f6b4b4071 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1967.410107] env[63024]: INFO nova.compute.manager [req-c1c371b1-aaa4-40af-bcf1-fe5509cf60b4 req-31f0409b-5893-4a2e-aaa1-9caa1e423df0 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Neutron deleted interface c8c1b0a3-d31e-4600-b1be-f31f6b4b4071; detaching it from the instance and deleting it from the info cache [ 1967.410217] env[63024]: DEBUG nova.network.neutron [req-c1c371b1-aaa4-40af-bcf1-fe5509cf60b4 req-31f0409b-5893-4a2e-aaa1-9caa1e423df0 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.435341] env[63024]: DEBUG oslo_concurrency.lockutils [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "12e63b42-5554-44d5-86eb-d592bc0b2ad6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1967.435574] env[63024]: DEBUG oslo_concurrency.lockutils [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "12e63b42-5554-44d5-86eb-d592bc0b2ad6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1967.435787] env[63024]: DEBUG oslo_concurrency.lockutils [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "12e63b42-5554-44d5-86eb-d592bc0b2ad6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1967.435979] env[63024]: DEBUG oslo_concurrency.lockutils [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 
tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "12e63b42-5554-44d5-86eb-d592bc0b2ad6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1967.436246] env[63024]: DEBUG oslo_concurrency.lockutils [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "12e63b42-5554-44d5-86eb-d592bc0b2ad6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1967.439433] env[63024]: INFO nova.compute.manager [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Terminating instance [ 1967.604330] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951457, 'name': CloneVM_Task} progress is 95%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1967.882930] env[63024]: DEBUG nova.network.neutron [-] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1967.914694] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1a4e1714-d02a-48e6-8e58-d16bf3d26896 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.937078] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5aa44274-7ea5-4a66-a752-2cf7f25a77f6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.953770] env[63024]: DEBUG nova.compute.manager [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1967.954158] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1967.955778] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20d1fdfe-2a74-46e2-b34e-a62ca84ed7cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.968695] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1967.969164] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-07c858b4-0e59-427b-8c77-4e30f30910f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1967.977041] env[63024]: DEBUG oslo_vmware.api [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1967.977041] env[63024]: value = "task-1951459" [ 1967.977041] env[63024]: _type = "Task" [ 1967.977041] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.002029] env[63024]: DEBUG nova.compute.manager [req-c1c371b1-aaa4-40af-bcf1-fe5509cf60b4 req-31f0409b-5893-4a2e-aaa1-9caa1e423df0 service nova] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Detach interface failed, port_id=c8c1b0a3-d31e-4600-b1be-f31f6b4b4071, reason: Instance 839776ef-0562-424d-b301-2aa896f32e14 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1968.011026] env[63024]: DEBUG oslo_vmware.api [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951459, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.069887] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78a92750-eb4a-41d6-9fc2-0fcf92bc9875 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.078618] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962b06ea-9c27-4c30-9233-2dfe2d1c3539 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.118984] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2d8eb1-19fc-4c08-9329-ed417bd71b12 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.133363] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08c90b82-28c8-4075-baf1-d617d0c40c57 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.140094] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951457, 'name': CloneVM_Task, 'duration_secs': 1.668938} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.140485] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Created linked-clone VM from snapshot [ 1968.142110] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08a11c05-8d28-43a3-9de8-84aec95215cf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.153816] env[63024]: DEBUG nova.compute.provider_tree [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1968.159444] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Uploading image 07a796d4-2723-41a7-afac-14eecabc2bc1 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1968.195042] env[63024]: DEBUG oslo_vmware.rw_handles [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1968.195042] env[63024]: value = "vm-402201" [ 1968.195042] env[63024]: _type = "VirtualMachine" [ 1968.195042] env[63024]: }. 
{{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1968.195042] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-e09f826d-9bf4-4fa8-abd9-3f321e74b0d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.202796] env[63024]: DEBUG oslo_vmware.rw_handles [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lease: (returnval){ [ 1968.202796] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fcbadd-d7fe-b663-ced8-918bf02d7fcf" [ 1968.202796] env[63024]: _type = "HttpNfcLease" [ 1968.202796] env[63024]: } obtained for exporting VM: (result){ [ 1968.202796] env[63024]: value = "vm-402201" [ 1968.202796] env[63024]: _type = "VirtualMachine" [ 1968.202796] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1968.203237] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the lease: (returnval){ [ 1968.203237] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fcbadd-d7fe-b663-ced8-918bf02d7fcf" [ 1968.203237] env[63024]: _type = "HttpNfcLease" [ 1968.203237] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1968.210974] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1968.210974] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fcbadd-d7fe-b663-ced8-918bf02d7fcf" [ 1968.210974] env[63024]: _type = "HttpNfcLease" [ 1968.210974] env[63024]: } is initializing. 
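The export sequence above (VirtualMachine.ExportVm, then waiting for the HttpNfcLease to move from "initializing" to "ready", then reading the disk URL) can be driven with the same session object. A rough sketch under the same assumptions, with session and vm_ref as in the earlier sketch; the deviceUrl handling mirrors what oslo.vmware's rw_handles helper does, simplified here:

    from oslo_vmware import vim_util

    # Ask vCenter to export the VM; this returns an HttpNfcLease moref.
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)

    # Poll until the lease reaches the "ready" state seen later in the log.
    session.wait_for_lease_ready(lease)

    # The lease info carries one deviceUrl per exported disk; the disk entry's
    # URL is the "Found VMDK URL: https://.../disk-0.vmdk" logged below.
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    vmdk_url = next(d.url for d in lease_info.deviceUrl if d.disk)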
{{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1968.305340] env[63024]: DEBUG nova.network.neutron [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Updating instance_info_cache with network_info: [{"id": "f78f097c-0df1-4f4f-8941-cf21c2b2ca4b", "address": "fa:16:3e:df:09:6b", "network": {"id": "83ed1c04-a2e0-4c15-ae35-68e988607ce4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-470202335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb1fcc9fd945cb9f4477fe1cce3f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf78f097c-0d", "ovs_interfaceid": "f78f097c-0df1-4f4f-8941-cf21c2b2ca4b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1968.385484] env[63024]: INFO nova.compute.manager [-] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Took 1.62 seconds to deallocate network for instance. [ 1968.491170] env[63024]: DEBUG oslo_vmware.api [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951459, 'name': PowerOffVM_Task, 'duration_secs': 0.210344} completed successfully. 
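The instance_info_cache payload logged above is a plain list of VIF dictionaries, so pulling addresses out of it is simple traversal. A small, self-contained illustration using the field names from the entry above; for that entry it yields the fixed address 192.168.128.7 with floating address 10.180.180.218:

    def addresses_from_network_info(network_info):
        """Yield (fixed_ip, [floating_ips]) pairs from a Nova network_info list."""
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    floating = [f['address'] for f in ip.get('floating_ips', [])]
                    yield ip['address'], floating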
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1968.493217] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1968.493217] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1968.493217] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-255a6d2b-6178-4687-a8f1-ff31481ff2fb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.630968] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1968.631348] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1968.666822] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Deleting the datastore file [datastore1] 12e63b42-5554-44d5-86eb-d592bc0b2ad6 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1968.666822] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f057df65-30f5-4ad9-b38a-b8988287c570 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.666822] env[63024]: DEBUG oslo_vmware.api [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1968.666822] env[63024]: value = "task-1951462" [ 1968.666822] env[63024]: _type = "Task" [ 1968.666822] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1968.666822] env[63024]: DEBUG oslo_vmware.api [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951462, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1968.666822] env[63024]: DEBUG nova.scheduler.client.report [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1968.713235] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1968.713235] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fcbadd-d7fe-b663-ced8-918bf02d7fcf" [ 1968.713235] env[63024]: _type = "HttpNfcLease" [ 1968.713235] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1968.713235] env[63024]: DEBUG oslo_vmware.rw_handles [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1968.713235] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fcbadd-d7fe-b663-ced8-918bf02d7fcf" [ 1968.713235] env[63024]: _type = "HttpNfcLease" [ 1968.713235] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1968.713235] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-500ff830-d08a-4552-a14b-f2e86d85446d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.722557] env[63024]: DEBUG oslo_vmware.rw_handles [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522c8c96-b530-3ce8-a17f-d6ea0ad3f049/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1968.722732] env[63024]: DEBUG oslo_vmware.rw_handles [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522c8c96-b530-3ce8-a17f-d6ea0ad3f049/disk-0.vmdk for reading. 
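The inventory payload reported to placement above combines total, reserved and allocation_ratio per resource class; the capacity the scheduler can allocate against is (total - reserved) * allocation_ratio. A quick check with the numbers from the log:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0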
{{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1968.807805] env[63024]: DEBUG oslo_concurrency.lockutils [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Releasing lock "refresh_cache-73db94b8-cfa8-4457-bccb-d4b780edbd93" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1968.821908] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9a950fd4-90e4-46ff-aa31-05adffa84a75 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1968.891742] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1969.155656] env[63024]: DEBUG oslo_vmware.api [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951462, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140995} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1969.155924] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1969.156163] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1969.156309] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1969.156485] env[63024]: INFO nova.compute.manager [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1969.156731] env[63024]: DEBUG oslo.service.loopingcall [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
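The "Waiting for function ... _deallocate_network_with_retries to return" line comes from oslo.service's looping-call helper, which re-runs a function on an interval until it signals completion. A generic sketch of that pattern (illustrative only, not the exact retry wrapper Nova uses here):

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _try_deallocate():
        attempts['n'] += 1
        if attempts['n'] >= 3:                       # pretend the third try succeeds
            raise loopingcall.LoopingCallDone(True)  # stops the loop, .wait() returns True

    timer = loopingcall.FixedIntervalLoopingCall(_try_deallocate)
    result = timer.start(interval=1.0).wait()        # blocks until LoopingCallDone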
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1969.156920] env[63024]: DEBUG nova.compute.manager [-] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1969.157022] env[63024]: DEBUG nova.network.neutron [-] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1969.170850] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.595s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1969.171427] env[63024]: DEBUG nova.compute.manager [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1969.174913] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.185s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1969.174913] env[63024]: DEBUG nova.objects.instance [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'resources' on Instance uuid 9a7f4452-ae50-4779-8474-11d3a6d3533f {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1969.678148] env[63024]: DEBUG nova.compute.utils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1969.683604] env[63024]: DEBUG nova.compute.manager [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Allocating IP information in the background. 
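The "Acquiring lock ... by ..." / "acquired ... waited Ns" / '"released" ... held Ns' lines (lockutils.py:402/407/421) and the "Acquiring/Acquired/Releasing lock" lines (lockutils.py:310/313/331) above are emitted by oslo.concurrency around critical sections such as the resource tracker's compute_resources lock. A minimal sketch of the two usual forms, assuming an in-process (non-external) lock and placeholder bodies:

    from oslo_concurrency import lockutils

    # Context-manager form (produces the :310/:313/:331 debug lines).
    with lockutils.lock('compute_resources'):
        pass  # guarded critical section, e.g. updating tracked resource usage

    # Decorator form (produces the :402/:407/:421 debug lines); this is roughly
    # how methods such as ResourceTracker.instance_claim serialize on the lock.
    @lockutils.synchronized('compute_resources')
    def instance_claim():
        pass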
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1969.684578] env[63024]: DEBUG nova.network.neutron [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1969.796258] env[63024]: DEBUG nova.policy [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'da61004f95874d87a46ccfe7d6655273', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '85c7ff3f562f46e6b21ae5d0c01f85f5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1969.814862] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1969.815197] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f9afd28-ee4d-46a0-aba3-96b86dde3ec5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1969.831613] env[63024]: DEBUG oslo_vmware.api [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 1969.831613] env[63024]: value = "task-1951463" [ 1969.831613] env[63024]: _type = "Task" [ 1969.831613] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1969.845433] env[63024]: DEBUG oslo_vmware.api [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951463, 'name': PowerOnVM_Task} progress is 0%. 
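The "Policy check for network:attach_external_network failed" entry is an ordinary oslo.policy denial for a member/reader token; Nova then simply does not consider external networks for that request. A hedged sketch of evaluating such a rule with oslo.policy (the admin-only rule string and empty target are illustrative, not Nova's actual defaults):

    from oslo_config import cfg
    from oslo_policy import policy

    enforcer = policy.Enforcer(cfg.CONF)
    enforcer.register_default(
        policy.RuleDefault('network:attach_external_network', 'role:admin'))

    creds = {'roles': ['member', 'reader'],
             'project_id': '85c7ff3f562f46e6b21ae5d0c01f85f5'}
    allowed = enforcer.enforce('network:attach_external_network', {}, creds)
    # -> False for this token, matching the DEBUG line above.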
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1969.980530] env[63024]: DEBUG nova.compute.manager [req-834d0d4e-0d27-4379-97f3-8f2ed7809e3d req-284fdef0-9bb8-40ee-9f4f-25ae5a683de9 service nova] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Received event network-vif-deleted-1bb2d3e3-4d49-4c71-86a2-d2210cb9f711 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1969.980530] env[63024]: INFO nova.compute.manager [req-834d0d4e-0d27-4379-97f3-8f2ed7809e3d req-284fdef0-9bb8-40ee-9f4f-25ae5a683de9 service nova] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Neutron deleted interface 1bb2d3e3-4d49-4c71-86a2-d2210cb9f711; detaching it from the instance and deleting it from the info cache [ 1969.980530] env[63024]: DEBUG nova.network.neutron [req-834d0d4e-0d27-4379-97f3-8f2ed7809e3d req-284fdef0-9bb8-40ee-9f4f-25ae5a683de9 service nova] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.154631] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb6e0a6-e2e5-438b-b94d-de206de48945 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.167948] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955711c6-7009-4f0f-aee7-8dafaff4b1ff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.207605] env[63024]: DEBUG nova.compute.manager [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1970.211368] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75b5f91d-ff83-49f8-81a1-be420d3852a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.222363] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e558ef-f248-4e6e-b867-e4c4fa1f7b42 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.242204] env[63024]: DEBUG nova.compute.provider_tree [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1970.348736] env[63024]: DEBUG oslo_vmware.api [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951463, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1970.378357] env[63024]: DEBUG nova.network.neutron [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Successfully created port: 8f0f2560-fa66-4a0a-b255-b2cf04c94dae {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1970.451887] env[63024]: DEBUG nova.network.neutron [-] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1970.482866] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1065d99a-c317-461e-a0b4-d4a41cc7e22f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.496482] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64eb2537-eb9d-4e2d-8bb2-e87109f8986a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.556292] env[63024]: DEBUG nova.compute.manager [req-834d0d4e-0d27-4379-97f3-8f2ed7809e3d req-284fdef0-9bb8-40ee-9f4f-25ae5a683de9 service nova] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Detach interface failed, port_id=1bb2d3e3-4d49-4c71-86a2-d2210cb9f711, reason: Instance 12e63b42-5554-44d5-86eb-d592bc0b2ad6 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1970.747087] env[63024]: DEBUG nova.scheduler.client.report [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1970.851082] env[63024]: DEBUG oslo_vmware.api [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951463, 'name': PowerOnVM_Task, 'duration_secs': 0.689564} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1970.853182] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1970.853182] env[63024]: DEBUG nova.compute.manager [None req-24628e53-915c-41ee-985e-cffe636b4941 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1970.853182] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16d830a4-6eec-4903-a516-8d228f0c6b2d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1970.955535] env[63024]: INFO nova.compute.manager [-] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Took 1.80 seconds to deallocate network for instance. [ 1971.223599] env[63024]: DEBUG nova.compute.manager [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1971.253022] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.077s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1971.257028] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.064s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1971.257028] env[63024]: INFO nova.compute.claims [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1971.266846] env[63024]: DEBUG nova.virt.hardware [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1971.266846] env[63024]: DEBUG nova.virt.hardware [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1971.266846] env[63024]: DEBUG nova.virt.hardware [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1971.266846] env[63024]: DEBUG nova.virt.hardware [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1971.266846] env[63024]: DEBUG nova.virt.hardware [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1971.266846] env[63024]: DEBUG nova.virt.hardware [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1971.266846] env[63024]: DEBUG nova.virt.hardware [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1971.267238] env[63024]: DEBUG nova.virt.hardware [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1971.267283] env[63024]: DEBUG nova.virt.hardware [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1971.267426] env[63024]: DEBUG nova.virt.hardware [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Possible topologies 
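The hardware lines above enumerate every (sockets, cores, threads) split of the flavor's vCPU count that fits the flavor/image limits and then sort them by preference; for the 1-vCPU m1.nano flavor the only option is 1:1:1. An illustrative enumeration of the same idea (not Nova's actual helper in nova/virt/hardware.py):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """All (sockets, cores, threads) whose product equals the vCPU count."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)], matching the log
    print(possible_topologies(4))   # [(1, 1, 4), (1, 2, 2), (1, 4, 1), (2, 1, 2), (2, 2, 1), (4, 1, 1)]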
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1971.267637] env[63024]: DEBUG nova.virt.hardware [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1971.269120] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512e8393-60ae-40b0-803c-17bb588c0945 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.279428] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d64cd3c1-fd3e-424f-9cda-53dc9e67cb5e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.299749] env[63024]: INFO nova.scheduler.client.report [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Deleted allocations for instance 9a7f4452-ae50-4779-8474-11d3a6d3533f [ 1971.463979] env[63024]: DEBUG oslo_concurrency.lockutils [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1971.811190] env[63024]: DEBUG oslo_concurrency.lockutils [None req-83bf042f-c168-4758-836e-ec78adddd999 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "9a7f4452-ae50-4779-8474-11d3a6d3533f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.218s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.377799] env[63024]: DEBUG nova.network.neutron [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Successfully updated port: 8f0f2560-fa66-4a0a-b255-b2cf04c94dae {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1972.382997] env[63024]: DEBUG nova.compute.manager [req-3b761702-3340-4cbf-82f3-34eb469e6443 req-61103c8f-d5db-4696-9fe4-636d447ea947 service nova] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Received event network-vif-plugged-8f0f2560-fa66-4a0a-b255-b2cf04c94dae {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1972.383681] env[63024]: DEBUG oslo_concurrency.lockutils [req-3b761702-3340-4cbf-82f3-34eb469e6443 req-61103c8f-d5db-4696-9fe4-636d447ea947 service nova] Acquiring lock "6c277ff8-ec25-4fd7-9dea-0efea9a0de29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.383681] env[63024]: DEBUG oslo_concurrency.lockutils [req-3b761702-3340-4cbf-82f3-34eb469e6443 req-61103c8f-d5db-4696-9fe4-636d447ea947 service nova] Lock 
"6c277ff8-ec25-4fd7-9dea-0efea9a0de29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.384138] env[63024]: DEBUG oslo_concurrency.lockutils [req-3b761702-3340-4cbf-82f3-34eb469e6443 req-61103c8f-d5db-4696-9fe4-636d447ea947 service nova] Lock "6c277ff8-ec25-4fd7-9dea-0efea9a0de29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1972.384207] env[63024]: DEBUG nova.compute.manager [req-3b761702-3340-4cbf-82f3-34eb469e6443 req-61103c8f-d5db-4696-9fe4-636d447ea947 service nova] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] No waiting events found dispatching network-vif-plugged-8f0f2560-fa66-4a0a-b255-b2cf04c94dae {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1972.384446] env[63024]: WARNING nova.compute.manager [req-3b761702-3340-4cbf-82f3-34eb469e6443 req-61103c8f-d5db-4696-9fe4-636d447ea947 service nova] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Received unexpected event network-vif-plugged-8f0f2560-fa66-4a0a-b255-b2cf04c94dae for instance with vm_state building and task_state spawning. [ 1972.770285] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Acquiring lock "a694e49c-37c5-483f-b1d8-5426f6a52b73" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1972.770645] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Lock "a694e49c-37c5-483f-b1d8-5426f6a52b73" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1972.794641] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9d36d2b-2e9c-45f1-b5ab-b671aa5697d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.803830] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc0fc43-daca-45ac-bf65-ae20101ab44b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.839468] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cda9b44-f28c-4e4a-9daa-a4a813156e33 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.849236] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc553254-0600-4f9d-a127-d82842e12036 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1972.865013] env[63024]: DEBUG nova.compute.provider_tree [None 
req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1972.886453] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Acquiring lock "refresh_cache-6c277ff8-ec25-4fd7-9dea-0efea9a0de29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1972.886710] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Acquired lock "refresh_cache-6c277ff8-ec25-4fd7-9dea-0efea9a0de29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1972.886923] env[63024]: DEBUG nova.network.neutron [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1973.274380] env[63024]: DEBUG nova.compute.manager [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1973.372153] env[63024]: DEBUG nova.scheduler.client.report [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1973.464483] env[63024]: DEBUG nova.network.neutron [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1973.802900] env[63024]: DEBUG nova.network.neutron [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Updating instance_info_cache with network_info: [{"id": "8f0f2560-fa66-4a0a-b255-b2cf04c94dae", "address": "fa:16:3e:36:5f:c2", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f0f2560-fa", "ovs_interfaceid": "8f0f2560-fa66-4a0a-b255-b2cf04c94dae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1973.807040] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1973.881450] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1973.882388] env[63024]: DEBUG nova.compute.manager [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1973.886790] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 14.272s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1973.886833] env[63024]: DEBUG nova.objects.instance [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63024) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1974.307552] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Releasing lock "refresh_cache-6c277ff8-ec25-4fd7-9dea-0efea9a0de29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1974.307877] env[63024]: DEBUG nova.compute.manager [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Instance network_info: |[{"id": "8f0f2560-fa66-4a0a-b255-b2cf04c94dae", "address": "fa:16:3e:36:5f:c2", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f0f2560-fa", "ovs_interfaceid": "8f0f2560-fa66-4a0a-b255-b2cf04c94dae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1974.308337] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:5f:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8652fa2-e608-4fe6-8b35-402a40906b40', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f0f2560-fa66-4a0a-b255-b2cf04c94dae', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1974.320284] env[63024]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Creating folder: Project (85c7ff3f562f46e6b21ae5d0c01f85f5). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1974.320633] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1cb5600a-9120-4cff-98b3-f6a384b8bfab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.339053] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Created folder: Project (85c7ff3f562f46e6b21ae5d0c01f85f5) in parent group-v401959. [ 1974.339270] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Creating folder: Instances. Parent ref: group-v402202. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1974.339522] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ff8d072-8556-45be-94b1-c74f4c60009b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.354607] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Created folder: Instances in parent group-v402202. [ 1974.354884] env[63024]: DEBUG oslo.service.loopingcall [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1974.355105] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1974.355330] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c9aaa2b-be95-4572-91fe-8b5af23a6c9f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1974.377308] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1974.377308] env[63024]: value = "task-1951466" [ 1974.377308] env[63024]: _type = "Task" [ 1974.377308] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1974.388240] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951466, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.396056] env[63024]: DEBUG nova.compute.utils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1974.397630] env[63024]: DEBUG nova.compute.manager [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1974.397804] env[63024]: DEBUG nova.network.neutron [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1974.489743] env[63024]: DEBUG nova.policy [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c122183f7314e8bb9e13520412a9765', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c59166884ad840669a1f90ea5f19afd3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1974.694135] env[63024]: DEBUG oslo_concurrency.lockutils [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "01b8072a-4483-4932-8294-7e5b48e6b203" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1974.694135] env[63024]: DEBUG oslo_concurrency.lockutils [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "01b8072a-4483-4932-8294-7e5b48e6b203" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.834059] env[63024]: DEBUG nova.compute.manager [req-9c84e2f4-1ba8-4ed7-9d1d-faedaf638ae9 req-fe7a909b-a078-459c-8f40-d305237dcaa8 service nova] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Received event network-changed-8f0f2560-fa66-4a0a-b255-b2cf04c94dae {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1974.834059] env[63024]: DEBUG nova.compute.manager [req-9c84e2f4-1ba8-4ed7-9d1d-faedaf638ae9 req-fe7a909b-a078-459c-8f40-d305237dcaa8 service nova] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Refreshing instance network info cache due to event network-changed-8f0f2560-fa66-4a0a-b255-b2cf04c94dae. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1974.834059] env[63024]: DEBUG oslo_concurrency.lockutils [req-9c84e2f4-1ba8-4ed7-9d1d-faedaf638ae9 req-fe7a909b-a078-459c-8f40-d305237dcaa8 service nova] Acquiring lock "refresh_cache-6c277ff8-ec25-4fd7-9dea-0efea9a0de29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1974.834059] env[63024]: DEBUG oslo_concurrency.lockutils [req-9c84e2f4-1ba8-4ed7-9d1d-faedaf638ae9 req-fe7a909b-a078-459c-8f40-d305237dcaa8 service nova] Acquired lock "refresh_cache-6c277ff8-ec25-4fd7-9dea-0efea9a0de29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1974.834059] env[63024]: DEBUG nova.network.neutron [req-9c84e2f4-1ba8-4ed7-9d1d-faedaf638ae9 req-fe7a909b-a078-459c-8f40-d305237dcaa8 service nova] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Refreshing network info cache for port 8f0f2560-fa66-4a0a-b255-b2cf04c94dae {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1974.890770] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951466, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1974.901546] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e3bcb396-0fab-4c36-ba36-1ef3ef9da69f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1974.901546] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.949s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1974.903434] env[63024]: INFO nova.compute.claims [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1974.906635] env[63024]: DEBUG nova.compute.manager [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1975.091723] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "50c72c53-ff72-42e6-afdc-14e0ac64f490" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.092650] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "50c72c53-ff72-42e6-afdc-14e0ac64f490" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.200023] env[63024]: INFO nova.compute.manager [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Detaching volume c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae [ 1975.247258] env[63024]: INFO nova.virt.block_device [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Attempting to driver detach volume c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae from mountpoint /dev/sdb [ 1975.247539] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1975.247799] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402172', 'volume_id': 'c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae', 'name': 'volume-c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '01b8072a-4483-4932-8294-7e5b48e6b203', 'attached_at': '', 'detached_at': '', 'volume_id': 'c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae', 'serial': 'c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1975.248764] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271b2b01-5c73-4a97-b545-9f90bf6ae07b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.276019] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c609bcf-5ddb-4315-9fc0-b8cda2e38240 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.285454] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3932e5fd-8d2c-4815-8851-4d5c7f0e7237 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.314195] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f8dba92-8fb5-478b-8d8f-2a93497176e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.319150] env[63024]: DEBUG nova.network.neutron [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Successfully created port: 86c4ee07-9789-478b-a753-54e9ba818274 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1975.333570] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] The volume has not been displaced from its original location: [datastore1] volume-c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae/volume-c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae.vmdk. No consolidation needed. 
{{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1975.339170] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Reconfiguring VM instance instance-00000048 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1975.340659] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-53ebac1f-8823-4b4e-b6cb-9869cdd1e3c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.367513] env[63024]: DEBUG oslo_vmware.api [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1975.367513] env[63024]: value = "task-1951467" [ 1975.367513] env[63024]: _type = "Task" [ 1975.367513] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.382481] env[63024]: DEBUG oslo_vmware.api [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951467, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.393545] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951466, 'name': CreateVM_Task, 'duration_secs': 0.886777} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.393727] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1975.394470] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.394698] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.394965] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1975.397793] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b423c9fc-07d0-4158-b936-1547aa11e6cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.404539] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Waiting for the task: (returnval){ [ 1975.404539] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5230c176-d563-1dc0-1e3e-bf00427c4563" [ 1975.404539] env[63024]: _type = "Task" [ 1975.404539] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.419620] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5230c176-d563-1dc0-1e3e-bf00427c4563, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.598607] env[63024]: DEBUG nova.compute.manager [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1975.635752] env[63024]: DEBUG nova.network.neutron [req-9c84e2f4-1ba8-4ed7-9d1d-faedaf638ae9 req-fe7a909b-a078-459c-8f40-d305237dcaa8 service nova] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Updated VIF entry in instance network info cache for port 8f0f2560-fa66-4a0a-b255-b2cf04c94dae. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1975.636291] env[63024]: DEBUG nova.network.neutron [req-9c84e2f4-1ba8-4ed7-9d1d-faedaf638ae9 req-fe7a909b-a078-459c-8f40-d305237dcaa8 service nova] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Updating instance_info_cache with network_info: [{"id": "8f0f2560-fa66-4a0a-b255-b2cf04c94dae", "address": "fa:16:3e:36:5f:c2", "network": {"id": "b22bfe1a-f169-41c3-ac9b-fdcf93268110", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.206", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "d78401abb63840f4b461856cfdb6dbbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8652fa2-e608-4fe6-8b35-402a40906b40", "external-id": "nsx-vlan-transportzone-837", "segmentation_id": 837, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f0f2560-fa", "ovs_interfaceid": "8f0f2560-fa66-4a0a-b255-b2cf04c94dae", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1975.860758] env[63024]: DEBUG oslo_concurrency.lockutils [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "0f371c69-c7ae-4649-b038-be82e8ca74e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.863154] env[63024]: DEBUG oslo_concurrency.lockutils [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "0f371c69-c7ae-4649-b038-be82e8ca74e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.863154] env[63024]: DEBUG oslo_concurrency.lockutils [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "0f371c69-c7ae-4649-b038-be82e8ca74e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.863154] env[63024]: DEBUG oslo_concurrency.lockutils [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "0f371c69-c7ae-4649-b038-be82e8ca74e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.863154] env[63024]: DEBUG oslo_concurrency.lockutils [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 
tempest-ServerActionsTestJSON-64550346-project-member] Lock "0f371c69-c7ae-4649-b038-be82e8ca74e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1975.864273] env[63024]: INFO nova.compute.manager [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Terminating instance [ 1975.881265] env[63024]: DEBUG oslo_vmware.api [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951467, 'name': ReconfigVM_Task, 'duration_secs': 0.330073} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.883036] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Reconfigured VM instance instance-00000048 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1975.887181] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ec7c403-1d7b-448f-a981-efdf56207aa6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.918170] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1975.918554] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1975.920356] env[63024]: DEBUG oslo_vmware.api [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1975.920356] env[63024]: value = "task-1951468" [ 1975.920356] env[63024]: _type = "Task" [ 1975.920356] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.921890] env[63024]: DEBUG nova.compute.manager [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1975.935522] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5230c176-d563-1dc0-1e3e-bf00427c4563, 'name': SearchDatastore_Task, 'duration_secs': 0.033019} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.936586] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1975.936586] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1975.936707] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1975.936867] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1975.937692] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1975.937822] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c77d2573-9d35-45a8-a588-143ddb8e0b37 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.945450] env[63024]: DEBUG oslo_vmware.api [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951468, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1975.957296] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1975.957589] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1975.958429] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-270e1efb-181d-4684-8838-1b6bc42624e7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.964672] env[63024]: DEBUG nova.virt.hardware [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1975.965121] env[63024]: DEBUG nova.virt.hardware [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1975.965239] env[63024]: DEBUG nova.virt.hardware [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1975.965483] env[63024]: DEBUG nova.virt.hardware [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1975.965689] env[63024]: DEBUG nova.virt.hardware [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1975.965896] env[63024]: DEBUG nova.virt.hardware [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 
tempest-ServerAddressesTestJSON-198470985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1975.966190] env[63024]: DEBUG nova.virt.hardware [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1975.966421] env[63024]: DEBUG nova.virt.hardware [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1975.966657] env[63024]: DEBUG nova.virt.hardware [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1975.966881] env[63024]: DEBUG nova.virt.hardware [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1975.967132] env[63024]: DEBUG nova.virt.hardware [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1975.968050] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d4df40-db74-4471-88bd-09dc49d73298 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.975067] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Waiting for the task: (returnval){ [ 1975.975067] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f4ac14-4f12-b1ea-0ca0-027bbf68f14d" [ 1975.975067] env[63024]: _type = "Task" [ 1975.975067] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1975.982410] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65adc7ba-c0ec-45d0-a1fb-a22b166a55fd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1975.990876] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f4ac14-4f12-b1ea-0ca0-027bbf68f14d, 'name': SearchDatastore_Task, 'duration_secs': 0.014704} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1975.994959] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-edaa77cb-c422-4d25-85cc-86dd0127a2f8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.016694] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Waiting for the task: (returnval){ [ 1976.016694] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ee57fe-e2d0-a602-e42d-8d2c84e9f515" [ 1976.016694] env[63024]: _type = "Task" [ 1976.016694] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.023247] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ee57fe-e2d0-a602-e42d-8d2c84e9f515, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.118795] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1976.138970] env[63024]: DEBUG oslo_concurrency.lockutils [req-9c84e2f4-1ba8-4ed7-9d1d-faedaf638ae9 req-fe7a909b-a078-459c-8f40-d305237dcaa8 service nova] Releasing lock "refresh_cache-6c277ff8-ec25-4fd7-9dea-0efea9a0de29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.375036] env[63024]: DEBUG nova.compute.manager [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1976.375079] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1976.378495] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78229789-a4a6-4903-9dff-dea14260159e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.387263] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1976.387544] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-72429c72-0cc1-4bb9-a99e-33d1933ff53c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.395664] env[63024]: DEBUG oslo_vmware.api [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1976.395664] env[63024]: value = "task-1951469" [ 1976.395664] env[63024]: _type = "Task" [ 1976.395664] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.406014] env[63024]: DEBUG oslo_vmware.api [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951469, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.407821] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d78378d9-1a1b-4bf0-88f6-117632a56e3b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.415188] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816a9bde-4f9a-417f-88e6-048436571f12 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.421591] env[63024]: DEBUG nova.compute.manager [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1976.458482] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c15c6a3a-b9a5-4578-9550-04493293f4c6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.469876] env[63024]: DEBUG oslo_vmware.api [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951468, 'name': ReconfigVM_Task, 'duration_secs': 0.186958} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.472531] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402172', 'volume_id': 'c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae', 'name': 'volume-c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '01b8072a-4483-4932-8294-7e5b48e6b203', 'attached_at': '', 'detached_at': '', 'volume_id': 'c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae', 'serial': 'c9e6b177-cc1d-4c76-9cb0-d45b6ea1eeae'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1976.477063] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3deb189-55a1-4db0-998f-ea4bd1591cd5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.497707] env[63024]: DEBUG nova.compute.provider_tree [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1976.525502] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ee57fe-e2d0-a602-e42d-8d2c84e9f515, 'name': SearchDatastore_Task, 'duration_secs': 0.014648} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.525771] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1976.526073] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 6c277ff8-ec25-4fd7-9dea-0efea9a0de29/6c277ff8-ec25-4fd7-9dea-0efea9a0de29.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1976.526362] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28dc7207-04a5-4226-b5ce-57a415357c49 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.535677] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Waiting for the task: (returnval){ [ 1976.535677] env[63024]: value = "task-1951470" [ 1976.535677] env[63024]: _type = "Task" [ 1976.535677] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1976.545976] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951470, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1976.907721] env[63024]: DEBUG oslo_vmware.api [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951469, 'name': PowerOffVM_Task, 'duration_secs': 0.218485} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1976.908068] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1976.908329] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1976.908707] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd50261f-7760-4cfa-8578-22bc6ddd7c8c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1976.974592] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.004218] env[63024]: DEBUG nova.scheduler.client.report [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1977.046989] env[63024]: DEBUG nova.compute.manager [req-eea8ecbc-dca7-4f7e-9dd4-b65e5aa4d0d4 req-d9fcc481-a70d-45e2-b924-f4d532cb475f service nova] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Received event network-vif-plugged-86c4ee07-9789-478b-a753-54e9ba818274 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1977.047270] env[63024]: DEBUG oslo_concurrency.lockutils [req-eea8ecbc-dca7-4f7e-9dd4-b65e5aa4d0d4 req-d9fcc481-a70d-45e2-b924-f4d532cb475f service nova] Acquiring lock "51bdfe4a-2439-4ad5-97f3-f60c70c87b9d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1977.047568] env[63024]: DEBUG oslo_concurrency.lockutils [req-eea8ecbc-dca7-4f7e-9dd4-b65e5aa4d0d4 req-d9fcc481-a70d-45e2-b924-f4d532cb475f service nova] Lock "51bdfe4a-2439-4ad5-97f3-f60c70c87b9d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.048953] env[63024]: DEBUG oslo_concurrency.lockutils [req-eea8ecbc-dca7-4f7e-9dd4-b65e5aa4d0d4 req-d9fcc481-a70d-45e2-b924-f4d532cb475f service nova] 
Lock "51bdfe4a-2439-4ad5-97f3-f60c70c87b9d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.048953] env[63024]: DEBUG nova.compute.manager [req-eea8ecbc-dca7-4f7e-9dd4-b65e5aa4d0d4 req-d9fcc481-a70d-45e2-b924-f4d532cb475f service nova] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] No waiting events found dispatching network-vif-plugged-86c4ee07-9789-478b-a753-54e9ba818274 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1977.048953] env[63024]: WARNING nova.compute.manager [req-eea8ecbc-dca7-4f7e-9dd4-b65e5aa4d0d4 req-d9fcc481-a70d-45e2-b924-f4d532cb475f service nova] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Received unexpected event network-vif-plugged-86c4ee07-9789-478b-a753-54e9ba818274 for instance with vm_state building and task_state spawning. [ 1977.049314] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1977.049314] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1977.049404] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleting the datastore file [datastore1] 0f371c69-c7ae-4649-b038-be82e8ca74e1 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1977.051208] env[63024]: DEBUG nova.objects.instance [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lazy-loading 'flavor' on Instance uuid 01b8072a-4483-4932-8294-7e5b48e6b203 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1977.053454] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-225afa70-2248-41fa-93d8-ad6a47a50eda {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.062057] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951470, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.063648] env[63024]: DEBUG oslo_vmware.api [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 1977.063648] env[63024]: value = "task-1951472" [ 1977.063648] env[63024]: _type = "Task" [ 1977.063648] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.077641] env[63024]: DEBUG oslo_vmware.api [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951472, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.252801] env[63024]: DEBUG nova.network.neutron [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Successfully updated port: 86c4ee07-9789-478b-a753-54e9ba818274 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1977.396169] env[63024]: DEBUG oslo_vmware.rw_handles [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522c8c96-b530-3ce8-a17f-d6ea0ad3f049/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1977.397123] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21729949-d039-47db-aceb-05fab9572cb6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.403932] env[63024]: DEBUG oslo_vmware.rw_handles [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522c8c96-b530-3ce8-a17f-d6ea0ad3f049/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1977.404112] env[63024]: ERROR oslo_vmware.rw_handles [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522c8c96-b530-3ce8-a17f-d6ea0ad3f049/disk-0.vmdk due to incomplete transfer. [ 1977.404336] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-901650ae-260c-4521-9777-b44dcd4235a5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.412078] env[63024]: DEBUG oslo_vmware.rw_handles [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522c8c96-b530-3ce8-a17f-d6ea0ad3f049/disk-0.vmdk. 
{{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1977.412290] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Uploaded image 07a796d4-2723-41a7-afac-14eecabc2bc1 to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1977.414676] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1977.414911] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-12835e04-a60a-413a-847f-75c0a51b2e37 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.421782] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1977.421782] env[63024]: value = "task-1951473" [ 1977.421782] env[63024]: _type = "Task" [ 1977.421782] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.429673] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951473, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.519856] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.619s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1977.520509] env[63024]: DEBUG nova.compute.manager [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1977.523437] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 15.685s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1977.547851] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951470, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.602514} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.548149] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 6c277ff8-ec25-4fd7-9dea-0efea9a0de29/6c277ff8-ec25-4fd7-9dea-0efea9a0de29.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1977.548375] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1977.548635] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a751f254-fd2b-4f4c-b9de-6f5096251b45 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.558705] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Waiting for the task: (returnval){ [ 1977.558705] env[63024]: value = "task-1951474" [ 1977.558705] env[63024]: _type = "Task" [ 1977.558705] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.572351] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951474, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1977.578368] env[63024]: DEBUG oslo_vmware.api [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951472, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.274827} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.578676] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1977.578947] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1977.579179] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1977.579374] env[63024]: INFO nova.compute.manager [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1977.579646] env[63024]: DEBUG oslo.service.loopingcall [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1977.580138] env[63024]: DEBUG nova.compute.manager [-] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1977.580254] env[63024]: DEBUG nova.network.neutron [-] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1977.756594] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Acquiring lock "refresh_cache-51bdfe4a-2439-4ad5-97f3-f60c70c87b9d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1977.756666] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Acquired lock "refresh_cache-51bdfe4a-2439-4ad5-97f3-f60c70c87b9d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1977.756796] env[63024]: DEBUG nova.network.neutron [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1977.932891] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 
tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951473, 'name': Destroy_Task, 'duration_secs': 0.414433} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1977.933309] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Destroyed the VM [ 1977.933562] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1977.933826] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-0ded872d-3d6c-49b0-a19d-fa45032dff67 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1977.942263] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1977.942263] env[63024]: value = "task-1951475" [ 1977.942263] env[63024]: _type = "Task" [ 1977.942263] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1977.950814] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951475, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.029866] env[63024]: DEBUG nova.compute.utils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1978.032899] env[63024]: DEBUG nova.compute.manager [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1978.033227] env[63024]: DEBUG nova.network.neutron [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1978.066458] env[63024]: DEBUG oslo_concurrency.lockutils [None req-780eca41-0883-4fa9-b156-2f4e256e4314 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "01b8072a-4483-4932-8294-7e5b48e6b203" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.374s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1978.074716] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951474, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070823} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.080033] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1978.080033] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-698d3ca6-c974-4865-9fe2-a8cde39a6aba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.107848] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 6c277ff8-ec25-4fd7-9dea-0efea9a0de29/6c277ff8-ec25-4fd7-9dea-0efea9a0de29.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1978.113084] env[63024]: DEBUG nova.policy [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '893bfe0d8eef423aae6c7eb5cdc1a9e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18540818b60e4483963d14559bc5c38d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1978.114937] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3b82f0ba-17ac-4a46-8d20-556f6f99388b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.138247] env[63024]: DEBUG oslo_vmware.api 
[None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Waiting for the task: (returnval){ [ 1978.138247] env[63024]: value = "task-1951476" [ 1978.138247] env[63024]: _type = "Task" [ 1978.138247] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.153553] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951476, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.334299] env[63024]: DEBUG nova.network.neutron [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1978.458095] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951475, 'name': RemoveSnapshot_Task, 'duration_secs': 0.350402} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.461726] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1978.461990] env[63024]: DEBUG nova.compute.manager [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1978.462842] env[63024]: DEBUG nova.network.neutron [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Successfully created port: 19edf22a-26aa-401d-bda2-51e972825722 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1978.465755] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-555f1cae-c609-436e-8a99-9555ab8d6490 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.540325] env[63024]: DEBUG nova.compute.manager [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1978.592177] env[63024]: DEBUG nova.network.neutron [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Updating instance_info_cache with network_info: [{"id": "86c4ee07-9789-478b-a753-54e9ba818274", "address": "fa:16:3e:4e:96:18", "network": {"id": "cb6313a5-90d6-434f-8a40-a6c8b06987d9", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-809145964-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c59166884ad840669a1f90ea5f19afd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86c4ee07-97", "ovs_interfaceid": "86c4ee07-9789-478b-a753-54e9ba818274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1978.608586] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15fe671e-0966-4424-acad-da8e1e0ecfd2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.618153] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dc0fff0-ca28-47af-a5c1-755012098a24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.659327] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-145894a0-0860-47e3-8cf8-3e15cdaf7f56 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.671170] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19dd76da-10e1-4571-b453-047fc5346350 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.677498] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951476, 'name': ReconfigVM_Task, 'duration_secs': 0.316666} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1978.677895] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 6c277ff8-ec25-4fd7-9dea-0efea9a0de29/6c277ff8-ec25-4fd7-9dea-0efea9a0de29.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1978.678870] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ecbab12-ce01-48df-9e20-d7bdcb1ef387 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1978.690446] env[63024]: DEBUG nova.compute.provider_tree [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1978.693095] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Waiting for the task: (returnval){ [ 1978.693095] env[63024]: value = "task-1951477" [ 1978.693095] env[63024]: _type = "Task" [ 1978.693095] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1978.703429] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951477, 'name': Rename_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1978.832164] env[63024]: DEBUG nova.network.neutron [-] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1978.979520] env[63024]: INFO nova.compute.manager [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Shelve offloading [ 1979.097088] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Releasing lock "refresh_cache-51bdfe4a-2439-4ad5-97f3-f60c70c87b9d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1979.097396] env[63024]: DEBUG nova.compute.manager [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Instance network_info: |[{"id": "86c4ee07-9789-478b-a753-54e9ba818274", "address": "fa:16:3e:4e:96:18", "network": {"id": "cb6313a5-90d6-434f-8a40-a6c8b06987d9", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-809145964-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c59166884ad840669a1f90ea5f19afd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86c4ee07-97", "ovs_interfaceid": "86c4ee07-9789-478b-a753-54e9ba818274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1979.098060] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:96:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '33ddef78-922c-4cd3-99b0-971ac7802856', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '86c4ee07-9789-478b-a753-54e9ba818274', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1979.106569] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Creating folder: Project (c59166884ad840669a1f90ea5f19afd3). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1979.106859] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2f17a2c7-e1f8-4a31-92ce-87480636f0e6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.119673] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Created folder: Project (c59166884ad840669a1f90ea5f19afd3) in parent group-v401959. [ 1979.119881] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Creating folder: Instances. Parent ref: group-v402205. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1979.120191] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0523ed61-73df-48de-8a60-1b9e40611ddf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.127046] env[63024]: DEBUG oslo_concurrency.lockutils [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "01b8072a-4483-4932-8294-7e5b48e6b203" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.127649] env[63024]: DEBUG oslo_concurrency.lockutils [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "01b8072a-4483-4932-8294-7e5b48e6b203" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.127649] env[63024]: DEBUG oslo_concurrency.lockutils [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "01b8072a-4483-4932-8294-7e5b48e6b203-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.127821] env[63024]: DEBUG oslo_concurrency.lockutils [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "01b8072a-4483-4932-8294-7e5b48e6b203-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.127898] env[63024]: DEBUG oslo_concurrency.lockutils [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "01b8072a-4483-4932-8294-7e5b48e6b203-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.131809] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Created folder: Instances in parent group-v402205. [ 1979.132092] env[63024]: DEBUG oslo.service.loopingcall [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1979.132307] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1979.132520] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5469fdaf-34d9-49ea-af6c-eebcee989d16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.147193] env[63024]: INFO nova.compute.manager [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Terminating instance [ 1979.154949] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1979.154949] env[63024]: value = "task-1951480" [ 1979.154949] env[63024]: _type = "Task" [ 1979.154949] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.163399] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951480, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.195815] env[63024]: DEBUG nova.compute.manager [req-915e634b-ec85-4f9f-8ae1-51bb6e958455 req-60db2ed9-4524-4809-bb3b-a9b95936dcb5 service nova] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Received event network-changed-86c4ee07-9789-478b-a753-54e9ba818274 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1979.196054] env[63024]: DEBUG nova.compute.manager [req-915e634b-ec85-4f9f-8ae1-51bb6e958455 req-60db2ed9-4524-4809-bb3b-a9b95936dcb5 service nova] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Refreshing instance network info cache due to event network-changed-86c4ee07-9789-478b-a753-54e9ba818274. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1979.196250] env[63024]: DEBUG oslo_concurrency.lockutils [req-915e634b-ec85-4f9f-8ae1-51bb6e958455 req-60db2ed9-4524-4809-bb3b-a9b95936dcb5 service nova] Acquiring lock "refresh_cache-51bdfe4a-2439-4ad5-97f3-f60c70c87b9d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.196398] env[63024]: DEBUG oslo_concurrency.lockutils [req-915e634b-ec85-4f9f-8ae1-51bb6e958455 req-60db2ed9-4524-4809-bb3b-a9b95936dcb5 service nova] Acquired lock "refresh_cache-51bdfe4a-2439-4ad5-97f3-f60c70c87b9d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.196563] env[63024]: DEBUG nova.network.neutron [req-915e634b-ec85-4f9f-8ae1-51bb6e958455 req-60db2ed9-4524-4809-bb3b-a9b95936dcb5 service nova] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Refreshing network info cache for port 86c4ee07-9789-478b-a753-54e9ba818274 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1979.202173] env[63024]: DEBUG nova.scheduler.client.report [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1979.212679] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951477, 'name': Rename_Task, 'duration_secs': 0.198988} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1979.212796] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1979.212991] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e528c7c-c279-4d03-8b7b-365300c83965 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.221357] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Waiting for the task: (returnval){ [ 1979.221357] env[63024]: value = "task-1951481" [ 1979.221357] env[63024]: _type = "Task" [ 1979.221357] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.231499] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951481, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.334752] env[63024]: INFO nova.compute.manager [-] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Took 1.75 seconds to deallocate network for instance. [ 1979.484015] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1979.484745] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a000b30d-f113-450e-9b51-58493fa55fc0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.494584] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1979.494584] env[63024]: value = "task-1951482" [ 1979.494584] env[63024]: _type = "Task" [ 1979.494584] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.506559] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1979.506850] env[63024]: DEBUG nova.compute.manager [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1979.507674] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfccfc6-f56a-40d8-adc6-d60cab30c749 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.514213] env[63024]: DEBUG oslo_concurrency.lockutils [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1979.514431] env[63024]: DEBUG oslo_concurrency.lockutils [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1979.514610] env[63024]: DEBUG nova.network.neutron [None 
req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1979.550664] env[63024]: DEBUG nova.compute.manager [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1979.577416] env[63024]: DEBUG nova.virt.hardware [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1979.577669] env[63024]: DEBUG nova.virt.hardware [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1979.577831] env[63024]: DEBUG nova.virt.hardware [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1979.578014] env[63024]: DEBUG nova.virt.hardware [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1979.578163] env[63024]: DEBUG nova.virt.hardware [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1979.578305] env[63024]: DEBUG nova.virt.hardware [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1979.578506] env[63024]: DEBUG nova.virt.hardware [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1979.579438] env[63024]: DEBUG nova.virt.hardware [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1979.579438] env[63024]: DEBUG nova.virt.hardware [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1979.579438] env[63024]: DEBUG nova.virt.hardware [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1979.579438] env[63024]: DEBUG nova.virt.hardware [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1979.580127] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8012f231-b0d6-4313-b921-c622470b4b60 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.589785] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf0042e7-6d7b-4d5f-b3a2-6e8c6002e844 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.651889] env[63024]: DEBUG nova.compute.manager [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1979.652689] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1979.653447] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fca3f3b-a6f7-49c3-b83b-f2bca79a9fcf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.666236] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951480, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.668558] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1979.668833] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e18141f-264a-437e-acbe-f3e3000ef14f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1979.676666] env[63024]: DEBUG oslo_vmware.api [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1979.676666] env[63024]: value = "task-1951483" [ 1979.676666] env[63024]: _type = "Task" [ 1979.676666] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1979.685526] env[63024]: DEBUG oslo_vmware.api [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951483, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.733599] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951481, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1979.844073] env[63024]: DEBUG oslo_concurrency.lockutils [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.932984] env[63024]: DEBUG nova.compute.manager [req-7e593ad9-02f9-4dc8-8177-b1ddfd4d4e6e req-1c39c62e-5633-4b5b-a335-f8d14003aa31 service nova] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Received event network-vif-plugged-19edf22a-26aa-401d-bda2-51e972825722 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1979.933262] env[63024]: DEBUG oslo_concurrency.lockutils [req-7e593ad9-02f9-4dc8-8177-b1ddfd4d4e6e req-1c39c62e-5633-4b5b-a335-f8d14003aa31 service nova] Acquiring lock "e9784dce-9a3f-4969-b48c-9c5b17959d88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1979.933478] env[63024]: DEBUG oslo_concurrency.lockutils [req-7e593ad9-02f9-4dc8-8177-b1ddfd4d4e6e req-1c39c62e-5633-4b5b-a335-f8d14003aa31 service nova] Lock "e9784dce-9a3f-4969-b48c-9c5b17959d88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1979.933644] env[63024]: DEBUG oslo_concurrency.lockutils [req-7e593ad9-02f9-4dc8-8177-b1ddfd4d4e6e req-1c39c62e-5633-4b5b-a335-f8d14003aa31 service nova] Lock "e9784dce-9a3f-4969-b48c-9c5b17959d88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1979.933803] env[63024]: DEBUG nova.compute.manager [req-7e593ad9-02f9-4dc8-8177-b1ddfd4d4e6e req-1c39c62e-5633-4b5b-a335-f8d14003aa31 service nova] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] No waiting events found dispatching network-vif-plugged-19edf22a-26aa-401d-bda2-51e972825722 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1979.933963] env[63024]: WARNING nova.compute.manager [req-7e593ad9-02f9-4dc8-8177-b1ddfd4d4e6e req-1c39c62e-5633-4b5b-a335-f8d14003aa31 service nova] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Received unexpected event network-vif-plugged-19edf22a-26aa-401d-bda2-51e972825722 for instance with vm_state building and task_state spawning. [ 1980.014763] env[63024]: DEBUG nova.network.neutron [req-915e634b-ec85-4f9f-8ae1-51bb6e958455 req-60db2ed9-4524-4809-bb3b-a9b95936dcb5 service nova] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Updated VIF entry in instance network info cache for port 86c4ee07-9789-478b-a753-54e9ba818274. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1980.015253] env[63024]: DEBUG nova.network.neutron [req-915e634b-ec85-4f9f-8ae1-51bb6e958455 req-60db2ed9-4524-4809-bb3b-a9b95936dcb5 service nova] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Updating instance_info_cache with network_info: [{"id": "86c4ee07-9789-478b-a753-54e9ba818274", "address": "fa:16:3e:4e:96:18", "network": {"id": "cb6313a5-90d6-434f-8a40-a6c8b06987d9", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-809145964-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c59166884ad840669a1f90ea5f19afd3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "33ddef78-922c-4cd3-99b0-971ac7802856", "external-id": "nsx-vlan-transportzone-311", "segmentation_id": 311, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86c4ee07-97", "ovs_interfaceid": "86c4ee07-9789-478b-a753-54e9ba818274", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.031701] env[63024]: DEBUG nova.network.neutron [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Successfully updated port: 19edf22a-26aa-401d-bda2-51e972825722 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1980.167515] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951480, 'name': CreateVM_Task, 'duration_secs': 0.608822} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.167829] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1980.168506] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.168666] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.168995] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1980.169287] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7da5b553-564e-4dc0-8d0f-e17d41ed05f6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.174827] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Waiting for the task: (returnval){ [ 1980.174827] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52abea38-22dc-e067-8f8e-d091ca73bfdc" [ 1980.174827] env[63024]: _type = "Task" [ 1980.174827] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.186333] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52abea38-22dc-e067-8f8e-d091ca73bfdc, 'name': SearchDatastore_Task, 'duration_secs': 0.009163} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.189255] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1980.189517] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1980.189755] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.189904] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.190125] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1980.190394] env[63024]: DEBUG oslo_vmware.api [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951483, 'name': PowerOffVM_Task, 'duration_secs': 0.240606} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.190587] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d2dc62a-2609-4f75-b009-71eb3fc89b6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.192265] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1980.192807] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1980.192990] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f73e9e6a-5e62-4039-b3b7-148efc28ba2b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.202657] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1980.202835] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1980.203550] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-178d47bc-c0be-45c9-9507-dbb4e293a02e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.211159] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Waiting for the task: (returnval){ [ 1980.211159] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5285f01b-f214-6af8-14f1-b1e1dda18d62" [ 1980.211159] env[63024]: _type = "Task" [ 1980.211159] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.215043] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.692s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1980.218060] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.332s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1980.221697] env[63024]: DEBUG nova.objects.instance [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lazy-loading 'resources' on Instance uuid 82b7019c-5049-4b8b-abb4-46f326ce3d5b {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1980.229198] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5285f01b-f214-6af8-14f1-b1e1dda18d62, 'name': SearchDatastore_Task, 'duration_secs': 0.008519} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.230385] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-010b9a77-1b9b-4192-80a9-ef86d45e487e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.236525] env[63024]: DEBUG oslo_vmware.api [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951481, 'name': PowerOnVM_Task, 'duration_secs': 0.710175} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.237118] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1980.237325] env[63024]: INFO nova.compute.manager [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Took 9.01 seconds to spawn the instance on the hypervisor. 
[ 1980.237503] env[63024]: DEBUG nova.compute.manager [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1980.238269] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64ee2622-47e8-4df3-907b-9c0d33805129 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.242371] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Waiting for the task: (returnval){ [ 1980.242371] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52202e09-42b0-0014-c390-86150de3d568" [ 1980.242371] env[63024]: _type = "Task" [ 1980.242371] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.255636] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52202e09-42b0-0014-c390-86150de3d568, 'name': SearchDatastore_Task, 'duration_secs': 0.009478} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.256368] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1980.256630] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d/51bdfe4a-2439-4ad5-97f3-f60c70c87b9d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1980.256885] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2ef1b96-f9cc-4190-8398-8268532ba98f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.264049] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Waiting for the task: (returnval){ [ 1980.264049] env[63024]: value = "task-1951485" [ 1980.264049] env[63024]: _type = "Task" [ 1980.264049] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.273308] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951485, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.332807] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1980.332807] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1980.332807] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Deleting the datastore file [datastore1] 01b8072a-4483-4932-8294-7e5b48e6b203 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1980.332807] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-55919f25-4cbe-4843-b1f9-79ae2884acb9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.341486] env[63024]: DEBUG oslo_vmware.api [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 1980.341486] env[63024]: value = "task-1951486" [ 1980.341486] env[63024]: _type = "Task" [ 1980.341486] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.351494] env[63024]: DEBUG oslo_vmware.api [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951486, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.357321] env[63024]: DEBUG nova.network.neutron [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updating instance_info_cache with network_info: [{"id": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "address": "fa:16:3e:d8:3a:2e", "network": {"id": "feb2323b-f3cf-42d6-a22b-81d1c94fce9d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-75667819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f87eadd82394447910efa7b71814e97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05bc00c8-44", "ovs_interfaceid": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1980.520911] env[63024]: DEBUG oslo_concurrency.lockutils [req-915e634b-ec85-4f9f-8ae1-51bb6e958455 req-60db2ed9-4524-4809-bb3b-a9b95936dcb5 service nova] Releasing lock "refresh_cache-51bdfe4a-2439-4ad5-97f3-f60c70c87b9d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1980.521070] env[63024]: DEBUG nova.compute.manager [req-915e634b-ec85-4f9f-8ae1-51bb6e958455 req-60db2ed9-4524-4809-bb3b-a9b95936dcb5 service nova] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Received event network-vif-deleted-c48bb2e4-b1fe-46e8-9eaf-75fcb7827e8c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1980.534134] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "refresh_cache-e9784dce-9a3f-4969-b48c-9c5b17959d88" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1980.534367] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "refresh_cache-e9784dce-9a3f-4969-b48c-9c5b17959d88" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1980.534555] env[63024]: DEBUG nova.network.neutron [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1980.778748] env[63024]: INFO nova.compute.manager [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 
tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Took 25.20 seconds to build instance. [ 1980.789010] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951485, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517513} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.792093] env[63024]: INFO nova.scheduler.client.report [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted allocation for migration 6aa2db89-5a8c-414f-93a4-16db64f9c2e6 [ 1980.799760] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d/51bdfe4a-2439-4ad5-97f3-f60c70c87b9d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1980.799760] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1980.799760] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-084ba32e-a110-4d9c-a89b-3816ca54c4fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1980.808739] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Waiting for the task: (returnval){ [ 1980.808739] env[63024]: value = "task-1951487" [ 1980.808739] env[63024]: _type = "Task" [ 1980.808739] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1980.828562] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951487, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1980.852486] env[63024]: DEBUG oslo_vmware.api [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951486, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.440017} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1980.852868] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1980.853105] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1980.853300] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1980.853995] env[63024]: INFO nova.compute.manager [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Took 1.20 seconds to destroy the instance on the hypervisor. [ 1980.853995] env[63024]: DEBUG oslo.service.loopingcall [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1980.853995] env[63024]: DEBUG nova.compute.manager [-] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1980.854172] env[63024]: DEBUG nova.network.neutron [-] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1980.861294] env[63024]: DEBUG oslo_concurrency.lockutils [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Releasing lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1981.111641] env[63024]: DEBUG nova.network.neutron [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1981.176131] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1981.177034] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d786efbd-2f76-4492-8990-742b6902b5fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.181938] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aa6e91b-a903-472a-99a4-749d6add078a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.192266] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e433834f-fe3b-4098-b5f8-4a630562faf9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.198739] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1981.201883] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cafebf8c-982c-4ffb-bfd6-cd7b694a7718 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.233130] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d0448fb-07fa-4a21-848f-4d67c953d2d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.241744] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-810f8201-78ff-448a-86d9-401665df8d22 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.258930] env[63024]: DEBUG nova.compute.provider_tree [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1981.281830] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9f714463-f8e2-4c36-961d-33b2cad1ebc8 tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Lock "6c277ff8-ec25-4fd7-9dea-0efea9a0de29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 26.716s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.299564] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c6d5aa65-9c13-4f2c-9dd6-73a60a418546 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af" 
"released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 22.592s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1981.325118] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951487, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.250146} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.326040] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1981.326842] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9641726-9efe-4f6d-9320-3f2018261070 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.351424] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d/51bdfe4a-2439-4ad5-97f3-f60c70c87b9d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1981.352151] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-adb897ea-d50d-4040-9ec4-f2b9ca168377 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.369082] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1981.369319] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1981.369585] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Deleting the datastore file [datastore1] 92d1f96e-bbe7-4654-9d3a-47ba40057157 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1981.370283] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-11b56510-e95d-4db6-bdd1-5b8ebab757f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1981.376015] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 
tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Waiting for the task: (returnval){ [ 1981.376015] env[63024]: value = "task-1951489" [ 1981.376015] env[63024]: _type = "Task" [ 1981.376015] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.380406] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 1981.380406] env[63024]: value = "task-1951490" [ 1981.380406] env[63024]: _type = "Task" [ 1981.380406] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1981.386635] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951489, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.392687] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951490, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.462694] env[63024]: DEBUG nova.network.neutron [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Updating instance_info_cache with network_info: [{"id": "19edf22a-26aa-401d-bda2-51e972825722", "address": "fa:16:3e:59:71:f0", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19edf22a-26", "ovs_interfaceid": "19edf22a-26aa-401d-bda2-51e972825722", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1981.764349] env[63024]: DEBUG nova.scheduler.client.report [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 
'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1981.889866] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951489, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1981.899359] env[63024]: DEBUG oslo_vmware.api [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951490, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137657} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1981.899963] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1981.900372] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1981.903118] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1981.930301] env[63024]: INFO nova.scheduler.client.report [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Deleted allocations for instance 92d1f96e-bbe7-4654-9d3a-47ba40057157 [ 1981.965939] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "refresh_cache-e9784dce-9a3f-4969-b48c-9c5b17959d88" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1981.966350] env[63024]: DEBUG nova.compute.manager [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Instance network_info: |[{"id": "19edf22a-26aa-401d-bda2-51e972825722", "address": "fa:16:3e:59:71:f0", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 
4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19edf22a-26", "ovs_interfaceid": "19edf22a-26aa-401d-bda2-51e972825722", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1981.966817] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:59:71:f0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec763be6-4041-4651-8fd7-3820cf0ab86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19edf22a-26aa-401d-bda2-51e972825722', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1981.975573] env[63024]: DEBUG oslo.service.loopingcall [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1981.976512] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1981.976754] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5a1ddb71-a07d-45a4-bd3b-69c29b170f19 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.007469] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1982.007469] env[63024]: value = "task-1951491" [ 1982.007469] env[63024]: _type = "Task" [ 1982.007469] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.016463] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951491, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.271077] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.052s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.273517] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 16.927s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.296770] env[63024]: INFO nova.scheduler.client.report [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleted allocations for instance 82b7019c-5049-4b8b-abb4-46f326ce3d5b [ 1982.353196] env[63024]: DEBUG nova.compute.manager [req-077f130e-1c14-4259-b6ad-2badd61182ea req-8a706ac3-579a-4944-93c7-27aa87b4bde3 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Received event network-vif-deleted-7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1982.353527] env[63024]: INFO nova.compute.manager [req-077f130e-1c14-4259-b6ad-2badd61182ea req-8a706ac3-579a-4944-93c7-27aa87b4bde3 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Neutron deleted interface 7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46; detaching it from the instance and deleting it from the info cache [ 1982.353777] env[63024]: DEBUG nova.network.neutron [req-077f130e-1c14-4259-b6ad-2badd61182ea req-8a706ac3-579a-4944-93c7-27aa87b4bde3 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.373450] env[63024]: DEBUG nova.network.neutron [-] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1982.384851] env[63024]: DEBUG nova.compute.manager [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Received event network-changed-19edf22a-26aa-401d-bda2-51e972825722 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1982.385093] env[63024]: DEBUG nova.compute.manager [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Refreshing instance network info cache due to event network-changed-19edf22a-26aa-401d-bda2-51e972825722. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1982.385355] env[63024]: DEBUG oslo_concurrency.lockutils [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] Acquiring lock "refresh_cache-e9784dce-9a3f-4969-b48c-9c5b17959d88" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.385456] env[63024]: DEBUG oslo_concurrency.lockutils [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] Acquired lock "refresh_cache-e9784dce-9a3f-4969-b48c-9c5b17959d88" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.385662] env[63024]: DEBUG nova.network.neutron [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Refreshing network info cache for port 19edf22a-26aa-401d-bda2-51e972825722 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1982.393887] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951489, 'name': ReconfigVM_Task, 'duration_secs': 0.632518} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.393887] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d/51bdfe4a-2439-4ad5-97f3-f60c70c87b9d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1982.393887] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0ef6e6e3-3578-4f61-80f1-71705b496d08 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.402811] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Waiting for the task: (returnval){ [ 1982.402811] env[63024]: value = "task-1951492" [ 1982.402811] env[63024]: _type = "Task" [ 1982.402811] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.413993] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951492, 'name': Rename_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.436875] env[63024]: DEBUG oslo_concurrency.lockutils [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.519987] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951491, 'name': CreateVM_Task, 'duration_secs': 0.398673} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.520569] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1982.521403] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1982.521612] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1982.521924] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1982.522272] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb6282b5-f0dc-42b2-b212-97773bf131df {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.529531] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1982.529531] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52afbeae-23a1-0f94-0cd3-6fcf96c2b7ee" [ 1982.529531] env[63024]: _type = "Task" [ 1982.529531] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.538800] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52afbeae-23a1-0f94-0cd3-6fcf96c2b7ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.780770] env[63024]: INFO nova.compute.claims [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1982.806924] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1853ceda-5f98-4669-95bc-ece1a6728158 tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "82b7019c-5049-4b8b-abb4-46f326ce3d5b" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 25.364s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.858022] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c68355c9-05a3-47cb-a8c8-64d6ae916793 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.868707] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d561299-5b87-4464-8a1d-eb2aa7c9fa4a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.879348] env[63024]: INFO nova.compute.manager [-] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Took 2.03 seconds to deallocate network for instance. [ 1982.907585] env[63024]: DEBUG nova.compute.manager [req-077f130e-1c14-4259-b6ad-2badd61182ea req-8a706ac3-579a-4944-93c7-27aa87b4bde3 service nova] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Detach interface failed, port_id=7d713c35-a0bd-4dd8-b1a9-b838b3bc7a46, reason: Instance 01b8072a-4483-4932-8294-7e5b48e6b203 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1982.918488] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951492, 'name': Rename_Task, 'duration_secs': 0.162664} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1982.918488] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1982.918488] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0129a9df-5b34-403b-af66-ceb4b26f904a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1982.929462] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Waiting for the task: (returnval){ [ 1982.929462] env[63024]: value = "task-1951493" [ 1982.929462] env[63024]: _type = "Task" [ 1982.929462] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1982.938193] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951493, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1982.966551] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.968896] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.002s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.968896] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.968896] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.968896] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.973040] env[63024]: INFO nova.compute.manager [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Terminating instance [ 1982.995442] env[63024]: DEBUG oslo_concurrency.lockutils [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Acquiring lock "6c277ff8-ec25-4fd7-9dea-0efea9a0de29" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.995574] env[63024]: DEBUG 
oslo_concurrency.lockutils [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Lock "6c277ff8-ec25-4fd7-9dea-0efea9a0de29" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.995696] env[63024]: DEBUG oslo_concurrency.lockutils [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Acquiring lock "6c277ff8-ec25-4fd7-9dea-0efea9a0de29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1982.995881] env[63024]: DEBUG oslo_concurrency.lockutils [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Lock "6c277ff8-ec25-4fd7-9dea-0efea9a0de29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1982.996065] env[63024]: DEBUG oslo_concurrency.lockutils [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Lock "6c277ff8-ec25-4fd7-9dea-0efea9a0de29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1982.998102] env[63024]: INFO nova.compute.manager [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Terminating instance [ 1983.044891] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52afbeae-23a1-0f94-0cd3-6fcf96c2b7ee, 'name': SearchDatastore_Task, 'duration_secs': 0.010224} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.045031] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.045478] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1983.045737] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1983.045913] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.046636] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1983.046927] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4934bd28-b937-4be4-87b4-802e26357bf4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.060436] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1983.060815] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1983.061702] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd95f21e-4f9a-4551-9e16-ae02a5f97346 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.070142] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1983.070142] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529fe9f7-31db-1b69-55c2-f22a349cf2eb" [ 1983.070142] env[63024]: _type = "Task" [ 1983.070142] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.084201] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529fe9f7-31db-1b69-55c2-f22a349cf2eb, 'name': SearchDatastore_Task, 'duration_secs': 0.011121} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.088580] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-120df304-591f-473e-a5a4-4ec19bb1701e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.093962] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1983.093962] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52413d9f-acd6-b5d7-13db-d5bd4135ae3e" [ 1983.093962] env[63024]: _type = "Task" [ 1983.093962] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.106093] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52413d9f-acd6-b5d7-13db-d5bd4135ae3e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.271851] env[63024]: DEBUG nova.network.neutron [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Updated VIF entry in instance network info cache for port 19edf22a-26aa-401d-bda2-51e972825722. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1983.271851] env[63024]: DEBUG nova.network.neutron [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Updating instance_info_cache with network_info: [{"id": "19edf22a-26aa-401d-bda2-51e972825722", "address": "fa:16:3e:59:71:f0", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19edf22a-26", "ovs_interfaceid": "19edf22a-26aa-401d-bda2-51e972825722", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1983.287294] env[63024]: INFO nova.compute.resource_tracker [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating resource usage from migration 8ae5334b-067b-4768-9988-eb18b89ae1b7 [ 1983.389521] env[63024]: DEBUG oslo_concurrency.lockutils [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.445834] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951493, 'name': PowerOnVM_Task} progress is 86%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.481416] env[63024]: DEBUG nova.compute.manager [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1983.481416] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1983.481416] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7eb8dc-508a-4db6-8c7d-518aacc37d15 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.490878] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1983.495935] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-512f3819-5680-4f46-97d2-842d3cced16c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.506962] env[63024]: DEBUG nova.compute.manager [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1983.507303] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1983.507717] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "85f31573-5535-4712-b736-747c43ed74b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.508402] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "85f31573-5535-4712-b736-747c43ed74b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.510184] env[63024]: DEBUG oslo_vmware.api [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1983.510184] env[63024]: value = "task-1951494" [ 1983.510184] env[63024]: _type = "Task" [ 1983.510184] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.514041] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90abd50d-243a-4946-a9f7-68c7c94ee673 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.524292] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "c1fd4146-6dd3-49e9-a744-466e6168e158" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.524550] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "c1fd4146-6dd3-49e9-a744-466e6168e158" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.524861] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "c1fd4146-6dd3-49e9-a744-466e6168e158-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.525142] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "c1fd4146-6dd3-49e9-a744-466e6168e158-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.525428] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "c1fd4146-6dd3-49e9-a744-466e6168e158-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.528557] env[63024]: INFO nova.compute.manager [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Terminating instance [ 1983.537800] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1983.541013] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6cc51c48-e8f7-49db-8973-0c4ac869a2ad {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.545872] env[63024]: DEBUG oslo_vmware.api [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951494, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.556711] env[63024]: DEBUG oslo_vmware.api [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Waiting for the task: (returnval){ [ 1983.556711] env[63024]: value = "task-1951495" [ 1983.556711] env[63024]: _type = "Task" [ 1983.556711] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.571293] env[63024]: DEBUG oslo_vmware.api [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951495, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.610971] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52413d9f-acd6-b5d7-13db-d5bd4135ae3e, 'name': SearchDatastore_Task, 'duration_secs': 0.013604} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.611436] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.611728] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e9784dce-9a3f-4969-b48c-9c5b17959d88/e9784dce-9a3f-4969-b48c-9c5b17959d88.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1983.612197] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7156b6b4-775f-48fc-a68c-1f7bdb7d7741 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.627058] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1983.627058] env[63024]: value = "task-1951496" [ 1983.627058] env[63024]: _type = "Task" [ 1983.627058] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1983.644144] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951496, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1983.777248] env[63024]: DEBUG oslo_concurrency.lockutils [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] Releasing lock "refresh_cache-e9784dce-9a3f-4969-b48c-9c5b17959d88" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1983.777248] env[63024]: DEBUG nova.compute.manager [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Received event network-vif-unplugged-05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1983.777248] env[63024]: DEBUG oslo_concurrency.lockutils [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] Acquiring lock "92d1f96e-bbe7-4654-9d3a-47ba40057157-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.777248] env[63024]: DEBUG oslo_concurrency.lockutils [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.777248] env[63024]: DEBUG oslo_concurrency.lockutils [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1983.777459] env[63024]: DEBUG nova.compute.manager [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] No waiting events found dispatching network-vif-unplugged-05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1983.777736] env[63024]: WARNING nova.compute.manager [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Received unexpected event network-vif-unplugged-05bc00c8-444d-425a-8c1e-0d34f269c7e8 for instance with vm_state shelved_offloaded and task_state None. 
[ 1983.777901] env[63024]: DEBUG nova.compute.manager [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Received event network-changed-05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1983.778063] env[63024]: DEBUG nova.compute.manager [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Refreshing instance network info cache due to event network-changed-05bc00c8-444d-425a-8c1e-0d34f269c7e8. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1983.778257] env[63024]: DEBUG oslo_concurrency.lockutils [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] Acquiring lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1983.778387] env[63024]: DEBUG oslo_concurrency.lockutils [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] Acquired lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1983.778539] env[63024]: DEBUG nova.network.neutron [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Refreshing network info cache for port 05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1983.795494] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eac07536-3429-46cf-9ba8-64bff8908eb9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.806692] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e875235-bd68-4b52-8e72-90c1b52b71f2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.844433] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12896db3-7b63-4306-b31c-fb7b8d5ffce4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.849172] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "ea24d375-ba88-42ca-a07e-52000ec613c0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1983.849447] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1983.856226] env[63024]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc24cfab-d43c-4a2e-a967-19b3dc2df41a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1983.874058] env[63024]: DEBUG nova.compute.provider_tree [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1983.940584] env[63024]: DEBUG oslo_vmware.api [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951493, 'name': PowerOnVM_Task, 'duration_secs': 0.888876} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1983.940879] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1983.941105] env[63024]: INFO nova.compute.manager [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Took 8.02 seconds to spawn the instance on the hypervisor. [ 1983.941297] env[63024]: DEBUG nova.compute.manager [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1983.942426] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22188fd0-c1ed-42a4-b233-a393287c28e8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.011658] env[63024]: DEBUG nova.compute.manager [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1984.028443] env[63024]: DEBUG oslo_vmware.api [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951494, 'name': PowerOffVM_Task, 'duration_secs': 0.217032} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.028718] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1984.028886] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1984.029641] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-894c0b39-5701-409c-b31b-68a2aa2d1d4e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.035745] env[63024]: DEBUG nova.compute.manager [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1984.035980] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1984.036881] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77bd8e9c-aa46-4f3a-89b2-257ac7b26ea2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.045455] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1984.045716] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dbe1fc45-7041-4b29-aac1-41470d40e41d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.053369] env[63024]: DEBUG oslo_vmware.api [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1984.053369] env[63024]: value = "task-1951498" [ 1984.053369] env[63024]: _type = "Task" [ 1984.053369] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.064721] env[63024]: DEBUG oslo_vmware.api [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951498, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.072011] env[63024]: DEBUG oslo_vmware.api [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951495, 'name': PowerOffVM_Task, 'duration_secs': 0.322666} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.072367] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1984.072570] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1984.072823] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-62dab597-c9e9-47ff-a8b6-fc4c7995a677 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.138919] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951496, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.352981] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1984.352981] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1984.352981] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Deleting the datastore file [datastore1] 6c277ff8-ec25-4fd7-9dea-0efea9a0de29 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1984.352981] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1984.353318] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1984.353318] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleting the datastore file [datastore1] 9e8e7b6e-1bb2-4e66-b734-2f56e31302af {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1984.354487] env[63024]: DEBUG nova.compute.utils [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1984.355962] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b773e92b-b19c-481e-85aa-ce09d584e1fe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.358680] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-71a05554-e3c5-4fbc-8145-9be3c2579ab4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.373867] env[63024]: DEBUG oslo_vmware.api [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Waiting for the task: (returnval){ [ 1984.373867] env[63024]: value = "task-1951501" [ 1984.373867] env[63024]: 
_type = "Task" [ 1984.373867] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.375429] env[63024]: DEBUG oslo_vmware.api [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 1984.375429] env[63024]: value = "task-1951500" [ 1984.375429] env[63024]: _type = "Task" [ 1984.375429] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.380298] env[63024]: DEBUG nova.scheduler.client.report [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1984.401678] env[63024]: DEBUG oslo_vmware.api [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951500, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.404075] env[63024]: DEBUG oslo_vmware.api [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951501, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.464915] env[63024]: INFO nova.compute.manager [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Took 26.30 seconds to build instance. [ 1984.536744] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1984.569532] env[63024]: DEBUG oslo_vmware.api [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951498, 'name': PowerOffVM_Task, 'duration_secs': 0.31876} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.570059] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1984.570372] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1984.570781] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bf3bc2dc-cfa5-43e9-ab5e-8d336de23a11 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.640955] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951496, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.860177} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.641339] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e9784dce-9a3f-4969-b48c-9c5b17959d88/e9784dce-9a3f-4969-b48c-9c5b17959d88.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1984.641616] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1984.641929] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17b36cda-9cf2-4ff5-b2ea-39aedfdaec84 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.651562] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1984.651562] env[63024]: value = "task-1951503" [ 1984.651562] env[63024]: _type = "Task" [ 1984.651562] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.662162] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951503, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.693920] env[63024]: DEBUG nova.network.neutron [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updated VIF entry in instance network info cache for port 05bc00c8-444d-425a-8c1e-0d34f269c7e8. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1984.694288] env[63024]: DEBUG nova.network.neutron [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updating instance_info_cache with network_info: [{"id": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "address": "fa:16:3e:d8:3a:2e", "network": {"id": "feb2323b-f3cf-42d6-a22b-81d1c94fce9d", "bridge": null, "label": "tempest-ServersNegativeTestJSON-75667819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f87eadd82394447910efa7b71814e97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap05bc00c8-44", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1984.862251] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.013s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1984.886635] env[63024]: DEBUG oslo_vmware.api [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Task: {'id': task-1951501, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.278124} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.887250] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1984.887447] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1984.887620] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1984.887793] env[63024]: INFO nova.compute.manager [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Took 1.38 seconds to destroy the instance on the hypervisor. [ 1984.888038] env[63024]: DEBUG oslo.service.loopingcall [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1984.888240] env[63024]: DEBUG nova.compute.manager [-] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1984.888336] env[63024]: DEBUG nova.network.neutron [-] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1984.893047] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.620s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1984.893237] env[63024]: INFO nova.compute.manager [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Migrating [ 1984.899745] env[63024]: DEBUG oslo_vmware.api [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951500, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.298393} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1984.902441] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.392s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1984.904247] env[63024]: INFO nova.compute.claims [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1984.906809] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1984.906990] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1984.907182] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1984.907354] env[63024]: INFO nova.compute.manager [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Took 1.43 seconds to destroy the instance on the hypervisor. [ 1984.907589] env[63024]: DEBUG oslo.service.loopingcall [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1984.913478] env[63024]: DEBUG nova.compute.manager [-] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1984.913478] env[63024]: DEBUG nova.network.neutron [-] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1984.945888] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1984.946138] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1984.946367] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleting the datastore file [datastore1] c1fd4146-6dd3-49e9-a744-466e6168e158 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1984.946597] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-aff4960c-6fc3-4623-8d05-c39fc57ed9c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1984.954128] env[63024]: DEBUG oslo_vmware.api [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for the task: (returnval){ [ 1984.954128] env[63024]: value = "task-1951504" [ 1984.954128] env[63024]: _type = "Task" [ 1984.954128] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1984.962654] env[63024]: DEBUG oslo_vmware.api [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951504, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1984.967292] env[63024]: DEBUG oslo_concurrency.lockutils [None req-467b7a6a-85c7-476f-b067-88dc3a66e391 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Lock "51bdfe4a-2439-4ad5-97f3-f60c70c87b9d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.820s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1985.165945] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951503, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.129308} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.166261] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1985.167212] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fe39d5-3d8f-4f84-ba6f-fddb489e6c34 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.193981] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] e9784dce-9a3f-4969-b48c-9c5b17959d88/e9784dce-9a3f-4969-b48c-9c5b17959d88.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1985.194286] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9430a97c-952f-4a82-942a-6896b744edef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.212022] env[63024]: DEBUG oslo_concurrency.lockutils [req-91da3ca4-09da-4bc8-92b5-16c8f38c7ea1 req-4d4a24b1-ba25-4c64-9756-4b19fe553d01 service nova] Releasing lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1985.216427] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1985.216427] env[63024]: value = "task-1951505" [ 1985.216427] env[63024]: _type = "Task" [ 1985.216427] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.225344] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.225659] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951505, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.412364] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1985.412644] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1985.412754] env[63024]: DEBUG nova.network.neutron [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1985.469085] env[63024]: DEBUG oslo_vmware.api [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Task: {'id': task-1951504, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161263} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.469359] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1985.469565] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1985.469745] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1985.469917] env[63024]: INFO nova.compute.manager [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Took 1.43 seconds to destroy the instance on the hypervisor. [ 1985.470185] env[63024]: DEBUG oslo.service.loopingcall [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1985.470765] env[63024]: DEBUG nova.compute.manager [-] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1985.470765] env[63024]: DEBUG nova.network.neutron [-] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1985.585640] env[63024]: DEBUG nova.compute.manager [req-eec12f77-eb6a-4beb-965d-448991bb1042 req-e2a3425b-b1e1-4279-8470-ff7a8d73551b service nova] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Received event network-vif-deleted-8f0f2560-fa66-4a0a-b255-b2cf04c94dae {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1985.585967] env[63024]: INFO nova.compute.manager [req-eec12f77-eb6a-4beb-965d-448991bb1042 req-e2a3425b-b1e1-4279-8470-ff7a8d73551b service nova] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Neutron deleted interface 8f0f2560-fa66-4a0a-b255-b2cf04c94dae; detaching it from the instance and deleting it from the info cache [ 1985.586282] env[63024]: DEBUG nova.network.neutron [req-eec12f77-eb6a-4beb-965d-448991bb1042 req-e2a3425b-b1e1-4279-8470-ff7a8d73551b service nova] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.734015] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951505, 'name': ReconfigVM_Task, 'duration_secs': 0.362188} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1985.734015] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Reconfigured VM instance instance-0000005c to attach disk [datastore1] e9784dce-9a3f-4969-b48c-9c5b17959d88/e9784dce-9a3f-4969-b48c-9c5b17959d88.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1985.734622] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-34cb1955-ff1c-409b-8c09-cbb48311b198 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.742734] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1985.742734] env[63024]: value = "task-1951506" [ 1985.742734] env[63024]: _type = "Task" [ 1985.742734] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1985.751892] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951506, 'name': Rename_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1985.851126] env[63024]: DEBUG nova.network.neutron [-] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.877803] env[63024]: DEBUG nova.network.neutron [-] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1985.902705] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "ea24d375-ba88-42ca-a07e-52000ec613c0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1985.902975] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1985.903309] env[63024]: INFO nova.compute.manager [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Attaching volume 5739f499-39f5-4d5e-8a12-f4f608420e01 to /dev/sdb [ 1985.944827] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e890739f-d482-4a3b-9a5f-9706378f908f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.958276] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceb5637b-fadb-4e92-91d2-2879985e2e12 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1985.973995] env[63024]: DEBUG nova.virt.block_device [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Updating existing volume attachment record: 00ed746c-d945-413e-aa4f-d4fa23c2439a {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1986.090485] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-33d3cad5-58a6-4a5f-b0d2-eccdb1444fc9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.103500] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce9fbd0f-219a-4b28-beeb-02bcbc6eed43 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.142008] env[63024]: DEBUG nova.compute.manager [req-eec12f77-eb6a-4beb-965d-448991bb1042 req-e2a3425b-b1e1-4279-8470-ff7a8d73551b service nova] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Detach interface failed, 
port_id=8f0f2560-fa66-4a0a-b255-b2cf04c94dae, reason: Instance 6c277ff8-ec25-4fd7-9dea-0efea9a0de29 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1986.211578] env[63024]: DEBUG nova.network.neutron [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance_info_cache with network_info: [{"id": "a86b5113-d05e-45ac-bf54-833ea769eae5", "address": "fa:16:3e:7e:0d:a2", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa86b5113-d0", "ovs_interfaceid": "a86b5113-d05e-45ac-bf54-833ea769eae5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1986.253434] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951506, 'name': Rename_Task, 'duration_secs': 0.15003} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.253656] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1986.254150] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b36ef4ec-673a-4e28-ba29-6404ad119419 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.262150] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1986.262150] env[63024]: value = "task-1951510" [ 1986.262150] env[63024]: _type = "Task" [ 1986.262150] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.270770] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951510, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.314057] env[63024]: DEBUG nova.network.neutron [-] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1986.334055] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Acquiring lock "51bdfe4a-2439-4ad5-97f3-f60c70c87b9d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.334055] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Lock "51bdfe4a-2439-4ad5-97f3-f60c70c87b9d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.334270] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Acquiring lock "51bdfe4a-2439-4ad5-97f3-f60c70c87b9d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.334683] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Lock "51bdfe4a-2439-4ad5-97f3-f60c70c87b9d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1986.334773] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Lock "51bdfe4a-2439-4ad5-97f3-f60c70c87b9d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1986.338090] env[63024]: INFO nova.compute.manager [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Terminating instance [ 1986.343584] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-873d6dea-88e3-4715-bdf9-dc0af3c53a35 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.354615] env[63024]: INFO nova.compute.manager [-] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Took 1.47 seconds to deallocate network for instance. [ 1986.357978] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70bc1674-0027-4141-973d-4a632d553bef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.402129] env[63024]: INFO nova.compute.manager [-] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Took 1.49 seconds to deallocate network for instance. [ 1986.405413] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e6cfd9-1f87-4faa-8626-237a8ce9d1c9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.417792] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdcdfcca-93b8-4c36-a2b2-758393a05721 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.436756] env[63024]: DEBUG nova.compute.provider_tree [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1986.714099] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1986.772993] env[63024]: DEBUG oslo_vmware.api [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951510, 'name': PowerOnVM_Task, 'duration_secs': 0.468202} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1986.774226] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1986.774226] env[63024]: INFO nova.compute.manager [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Took 7.22 seconds to spawn the instance on the hypervisor. 
[ 1986.774226] env[63024]: DEBUG nova.compute.manager [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1986.774496] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02cc4ef2-4a30-4234-b772-6a65d2b9e29d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.817148] env[63024]: INFO nova.compute.manager [-] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Took 1.35 seconds to deallocate network for instance. [ 1986.849363] env[63024]: DEBUG nova.compute.manager [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1986.849697] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1986.851135] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0004ce6c-e12a-4ac2-952c-bda6cbdb50e0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.860775] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1986.860952] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e125544-38d0-41f1-9e90-65b1ca2bf01c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1986.865938] env[63024]: DEBUG oslo_concurrency.lockutils [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.868705] env[63024]: DEBUG oslo_vmware.api [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Waiting for the task: (returnval){ [ 1986.868705] env[63024]: value = "task-1951511" [ 1986.868705] env[63024]: _type = "Task" [ 1986.868705] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1986.877223] env[63024]: DEBUG oslo_vmware.api [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951511, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1986.913297] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1986.943017] env[63024]: DEBUG nova.scheduler.client.report [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1987.290815] env[63024]: INFO nova.compute.manager [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Took 27.36 seconds to build instance. [ 1987.323773] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1987.379723] env[63024]: DEBUG oslo_vmware.api [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951511, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.445343] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.543s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.445913] env[63024]: DEBUG nova.compute.manager [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1987.448563] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.317s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1987.450068] env[63024]: INFO nova.compute.claims [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1987.637079] env[63024]: DEBUG nova.compute.manager [req-02c906ac-6b2f-4a80-a1e4-0c2c4b084167 req-9ccf5702-2a3c-4a3d-a463-5d56f0fe7a42 service nova] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Received event network-vif-deleted-6aa34054-6865-4348-9871-fd32c747ab34 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1987.637340] env[63024]: DEBUG nova.compute.manager [req-02c906ac-6b2f-4a80-a1e4-0c2c4b084167 req-9ccf5702-2a3c-4a3d-a463-5d56f0fe7a42 service nova] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Received event network-vif-deleted-c6c42da9-f98c-4f7e-94e7-39d45bc8f882 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1987.792987] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9122dca-30e9-487b-927d-3c8aa50eac98 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "e9784dce-9a3f-4969-b48c-9c5b17959d88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.875s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1987.881824] env[63024]: DEBUG oslo_vmware.api [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951511, 'name': PowerOffVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1987.954935] env[63024]: DEBUG nova.compute.utils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1987.958815] env[63024]: DEBUG nova.compute.manager [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Not allocating networking since 'none' was specified. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1988.233319] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd46bcca-4477-4ebf-b0fa-a8405b6d90ae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.257957] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance 'fe6847e2-a742-4338-983f-698c13aaefde' progress to 0 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1988.381823] env[63024]: DEBUG oslo_vmware.api [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951511, 'name': PowerOffVM_Task, 'duration_secs': 1.025612} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1988.382164] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1988.382348] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1988.382621] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58abf751-72ee-4690-b0d5-42966e97c7d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.459836] env[63024]: DEBUG nova.compute.manager [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1988.507173] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1988.507409] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1988.507595] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Deleting the datastore file [datastore1] 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1988.507855] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8a25635d-d70b-42e0-ba0a-ae174d835741 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.521805] env[63024]: DEBUG oslo_vmware.api [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Waiting for the task: (returnval){ [ 1988.521805] env[63024]: value = "task-1951514" [ 1988.521805] env[63024]: _type = "Task" [ 1988.521805] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.530933] env[63024]: DEBUG oslo_vmware.api [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951514, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1988.645816] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "e9784dce-9a3f-4969-b48c-9c5b17959d88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1988.646081] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "e9784dce-9a3f-4969-b48c-9c5b17959d88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1988.646343] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "e9784dce-9a3f-4969-b48c-9c5b17959d88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1988.646540] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "e9784dce-9a3f-4969-b48c-9c5b17959d88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1988.646712] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "e9784dce-9a3f-4969-b48c-9c5b17959d88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1988.648777] env[63024]: INFO nova.compute.manager [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Terminating instance [ 1988.769299] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1988.769928] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ed9d4814-1da7-4fdc-a396-7f349c94b4e6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.779437] env[63024]: DEBUG oslo_vmware.api [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1988.779437] env[63024]: value = "task-1951515" [ 1988.779437] env[63024]: _type = "Task" 
[ 1988.779437] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1988.807408] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1988.807576] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance 'fe6847e2-a742-4338-983f-698c13aaefde' progress to 17 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1988.867801] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3300c606-8227-48c2-a2f4-ecf66392575c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1988.876548] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60990dcf-0e92-434d-8849-b98c0864bf04 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.642355] env[63024]: DEBUG nova.compute.manager [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1989.642800] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1989.644819] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1989.645050] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1989.645218] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 
tempest-ServerActionsTestOtherB-772439459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1989.645430] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1989.645581] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1989.645729] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1989.645934] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1989.646109] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1989.646279] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1989.646438] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1989.646609] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1989.655685] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b62d5d-bfad-4c11-addf-f341662e1332 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.658458] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1aa32ff6-27da-48e3-b06b-14d95d2aab1c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.668420] env[63024]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa69d09-03f5-43cd-85dd-f0e36c42d192 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.676348] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1989.680556] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-440ff1f4-04ad-40aa-8681-83a73f26c6fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.682075] env[63024]: DEBUG oslo_vmware.api [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1989.682075] env[63024]: value = "task-1951516" [ 1989.682075] env[63024]: _type = "Task" [ 1989.682075] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.682347] env[63024]: DEBUG oslo_vmware.api [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Task: {'id': task-1951514, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149995} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1989.684578] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1989.684770] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1989.684943] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1989.685128] env[63024]: INFO nova.compute.manager [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Took 2.84 seconds to destroy the instance on the hypervisor. [ 1989.685366] env[63024]: DEBUG oslo.service.loopingcall [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1989.688423] env[63024]: DEBUG nova.compute.manager [-] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1989.688532] env[63024]: DEBUG nova.network.neutron [-] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1989.691278] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e362d2d9-bf49-42f2-97bf-7b2b752b50b4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1989.696289] env[63024]: DEBUG oslo_vmware.api [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1989.696289] env[63024]: value = "task-1951517" [ 1989.696289] env[63024]: _type = "Task" [ 1989.696289] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1989.703993] env[63024]: DEBUG oslo_vmware.api [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951516, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1989.712032] env[63024]: DEBUG nova.compute.provider_tree [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1989.716980] env[63024]: DEBUG oslo_vmware.api [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951517, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.068028] env[63024]: DEBUG nova.compute.manager [req-e40818d8-7b53-46a2-bcab-0eb35e32e023 req-d5665c8f-087f-45a1-8da5-942be547a638 service nova] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Received event network-vif-deleted-86c4ee07-9789-478b-a753-54e9ba818274 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1990.068112] env[63024]: INFO nova.compute.manager [req-e40818d8-7b53-46a2-bcab-0eb35e32e023 req-d5665c8f-087f-45a1-8da5-942be547a638 service nova] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Neutron deleted interface 86c4ee07-9789-478b-a753-54e9ba818274; detaching it from the instance and deleting it from the info cache [ 1990.068238] env[63024]: DEBUG nova.network.neutron [req-e40818d8-7b53-46a2-bcab-0eb35e32e023 req-d5665c8f-087f-45a1-8da5-942be547a638 service nova] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.154070] env[63024]: DEBUG nova.compute.manager [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1990.179820] env[63024]: DEBUG nova.virt.hardware [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1990.180094] env[63024]: DEBUG nova.virt.hardware [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1990.180321] env[63024]: DEBUG nova.virt.hardware [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1990.180438] env[63024]: DEBUG nova.virt.hardware [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1990.180582] env[63024]: DEBUG 
nova.virt.hardware [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1990.180726] env[63024]: DEBUG nova.virt.hardware [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1990.180928] env[63024]: DEBUG nova.virt.hardware [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1990.181099] env[63024]: DEBUG nova.virt.hardware [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1990.181270] env[63024]: DEBUG nova.virt.hardware [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1990.181437] env[63024]: DEBUG nova.virt.hardware [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1990.181606] env[63024]: DEBUG nova.virt.hardware [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1990.182516] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1abad7d6-3a8e-412e-b5a4-0e50cff417ea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.195176] env[63024]: DEBUG oslo_vmware.api [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951516, 'name': ReconfigVM_Task, 'duration_secs': 0.238157} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.197217] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance 'fe6847e2-a742-4338-983f-698c13aaefde' progress to 33 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1990.201853] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696f8722-2783-4df9-baaf-52b2a480141a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.215641] env[63024]: DEBUG nova.scheduler.client.report [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1990.219092] env[63024]: DEBUG oslo_vmware.api [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951517, 'name': PowerOffVM_Task, 'duration_secs': 0.238139} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.233276] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1990.233479] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1990.233944] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Instance VIF info [] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1990.241368] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Creating folder: Project (a08add86b5254e4f90557833b4627eb3). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1990.242360] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0b03fb2b-5300-4b1d-bcae-48e40316b94f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.244065] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5955282-c592-461b-8c5d-2a8915e64732 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.261524] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Created folder: Project (a08add86b5254e4f90557833b4627eb3) in parent group-v401959. [ 1990.261524] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Creating folder: Instances. Parent ref: group-v402211. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1990.262082] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-973a772c-8f34-4224-bb89-b6a1e90218f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.278140] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Created folder: Instances in parent group-v402211. [ 1990.278467] env[63024]: DEBUG oslo.service.loopingcall [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1990.278680] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1990.278966] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a7b2023-4304-4623-88fb-b39c1b004a80 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.302586] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1990.302586] env[63024]: value = "task-1951521" [ 1990.302586] env[63024]: _type = "Task" [ 1990.302586] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.313855] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951521, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.526673] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1990.526673] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1990.526673] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleting the datastore file [datastore1] e9784dce-9a3f-4969-b48c-9c5b17959d88 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1990.526873] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9f53a9d-35c1-4a7f-946f-b33d7dbe808d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.531286] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Volume attach. Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1990.531286] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402210', 'volume_id': '5739f499-39f5-4d5e-8a12-f4f608420e01', 'name': 'volume-5739f499-39f5-4d5e-8a12-f4f608420e01', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ea24d375-ba88-42ca-a07e-52000ec613c0', 'attached_at': '', 'detached_at': '', 'volume_id': '5739f499-39f5-4d5e-8a12-f4f608420e01', 'serial': '5739f499-39f5-4d5e-8a12-f4f608420e01'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1990.531465] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6956d401-9630-4672-91f3-81e0f5b0e339 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.548457] env[63024]: DEBUG nova.network.neutron [-] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1990.551877] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-206ccc05-6733-4d34-a7d5-6b5a02250374 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.554779] env[63024]: DEBUG 
oslo_vmware.api [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 1990.554779] env[63024]: value = "task-1951522" [ 1990.554779] env[63024]: _type = "Task" [ 1990.554779] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.579877] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] volume-5739f499-39f5-4d5e-8a12-f4f608420e01/volume-5739f499-39f5-4d5e-8a12-f4f608420e01.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1990.584027] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d1e56d76-ab18-4dfe-acc4-c05ccb4d1541 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.586348] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da2acef9-2522-4145-adf0-42fe1e1cfdd0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.600527] env[63024]: DEBUG oslo_vmware.api [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951522, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.608528] env[63024]: DEBUG oslo_vmware.api [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 1990.608528] env[63024]: value = "task-1951523" [ 1990.608528] env[63024]: _type = "Task" [ 1990.608528] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.616088] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b34ebe3-d904-4bc3-b1f9-637c1580e053 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.637843] env[63024]: DEBUG oslo_vmware.api [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951523, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.656444] env[63024]: DEBUG nova.compute.manager [req-e40818d8-7b53-46a2-bcab-0eb35e32e023 req-d5665c8f-087f-45a1-8da5-942be547a638 service nova] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Detach interface failed, port_id=86c4ee07-9789-478b-a753-54e9ba818274, reason: Instance 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 1990.708741] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1990.709225] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1990.709432] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1990.709628] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1990.709776] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1990.709923] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1990.710157] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1990.710325] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1990.710496] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Got 
1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1990.711152] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1990.711402] env[63024]: DEBUG nova.virt.hardware [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1990.716845] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Reconfiguring VM instance instance-00000041 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1990.717152] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9a7d9a82-9283-49f0-897f-f023fd9157b4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.730247] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.282s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1990.730667] env[63024]: DEBUG nova.compute.manager [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1990.733305] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.842s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1990.733987] env[63024]: DEBUG nova.objects.instance [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lazy-loading 'resources' on Instance uuid 839776ef-0562-424d-b301-2aa896f32e14 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1990.741987] env[63024]: DEBUG oslo_vmware.api [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1990.741987] env[63024]: value = "task-1951524" [ 1990.741987] env[63024]: _type = "Task" [ 1990.741987] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.751437] env[63024]: DEBUG oslo_vmware.api [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951524, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1990.814482] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951521, 'name': CreateVM_Task, 'duration_secs': 0.324316} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1990.814702] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1990.815176] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1990.815346] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1990.815755] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1990.816035] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f568ca2-1edd-4c71-8e62-82989082de05 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.821297] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Waiting for the task: (returnval){ [ 1990.821297] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529c2a13-8ca4-29bf-eea3-bfd3f7042f2d" [ 1990.821297] env[63024]: _type = "Task" [ 1990.821297] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.829851] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529c2a13-8ca4-29bf-eea3-bfd3f7042f2d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.056172] env[63024]: INFO nova.compute.manager [-] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Took 1.37 seconds to deallocate network for instance. [ 1991.070022] env[63024]: DEBUG oslo_vmware.api [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951522, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.156369} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.070325] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1991.070514] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1991.070691] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1991.070859] env[63024]: INFO nova.compute.manager [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Took 1.43 seconds to destroy the instance on the hypervisor. [ 1991.071117] env[63024]: DEBUG oslo.service.loopingcall [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1991.071322] env[63024]: DEBUG nova.compute.manager [-] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1991.071438] env[63024]: DEBUG nova.network.neutron [-] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1991.122189] env[63024]: DEBUG oslo_vmware.api [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951523, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.237178] env[63024]: DEBUG nova.compute.utils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1991.238630] env[63024]: DEBUG nova.compute.manager [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1991.238813] env[63024]: DEBUG nova.network.neutron [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1991.256266] env[63024]: DEBUG oslo_vmware.api [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951524, 'name': ReconfigVM_Task, 'duration_secs': 0.170805} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.256266] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Reconfigured VM instance instance-00000041 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1991.256266] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38f95f3b-bb5a-493c-a96d-23ba3899a9fd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.288827] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Reconfiguring VM instance instance-00000041 to attach disk [datastore1] fe6847e2-a742-4338-983f-698c13aaefde/fe6847e2-a742-4338-983f-698c13aaefde.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1991.292900] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-67674a1b-17ba-4503-8e6f-207913e69f03 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.318486] env[63024]: DEBUG oslo_vmware.api [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 1991.318486] env[63024]: value = "task-1951525" [ 1991.318486] env[63024]: _type = "Task" [ 1991.318486] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.319849] env[63024]: DEBUG nova.policy [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1efa94ebfd9143d7bb129313b3e3d5d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7fc70d467714e59b3c171a308feafdf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1991.341890] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529c2a13-8ca4-29bf-eea3-bfd3f7042f2d, 'name': SearchDatastore_Task, 'duration_secs': 0.010224} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.342686] env[63024]: DEBUG oslo_vmware.api [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951525, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.347291] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1991.347291] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1991.347291] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1991.347291] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1991.347291] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 
tempest-ServersAaction247Test-1934225695-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1991.347870] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-41ebbe97-a24e-462c-8ed9-999f1163af95 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.363022] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1991.363022] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1991.363022] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51de152b-c6bb-48cf-b527-8a1af5a0a28c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.368979] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Waiting for the task: (returnval){ [ 1991.368979] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c08160-a672-45ed-f6af-efb814cb9708" [ 1991.368979] env[63024]: _type = "Task" [ 1991.368979] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.382711] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c08160-a672-45ed-f6af-efb814cb9708, 'name': SearchDatastore_Task, 'duration_secs': 0.012272} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.384074] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f02c008a-d1e1-48ca-8b0b-d57df0218d7a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.390362] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Waiting for the task: (returnval){ [ 1991.390362] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f04596-d08c-ffa7-0ae5-18d59a4fcc68" [ 1991.390362] env[63024]: _type = "Task" [ 1991.390362] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.403215] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f04596-d08c-ffa7-0ae5-18d59a4fcc68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.566497] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1991.621324] env[63024]: DEBUG oslo_vmware.api [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951523, 'name': ReconfigVM_Task, 'duration_secs': 0.898293} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.621617] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Reconfigured VM instance instance-00000055 to attach disk [datastore1] volume-5739f499-39f5-4d5e-8a12-f4f608420e01/volume-5739f499-39f5-4d5e-8a12-f4f608420e01.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1991.628753] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c79f0768-0b66-429f-897b-36785b4a616b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.645590] env[63024]: DEBUG oslo_vmware.api [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 1991.645590] env[63024]: value = "task-1951526" [ 1991.645590] env[63024]: _type = "Task" [ 1991.645590] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.659816] env[63024]: DEBUG oslo_vmware.api [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951526, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.745136] env[63024]: DEBUG nova.compute.manager [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1991.784394] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c37929-3851-4f89-8630-03033f72dd7a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.792888] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75fc8776-9834-4323-b175-267fccbd8419 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.832093] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c9cc2e-3f2c-452f-ab1f-9de0ac6aee82 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.843368] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-248ac338-3c40-4b6e-b2f5-ff5d3746d649 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.847277] env[63024]: DEBUG oslo_vmware.api [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951525, 'name': ReconfigVM_Task, 'duration_secs': 0.293446} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.847563] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Reconfigured VM instance instance-00000041 to attach disk [datastore1] fe6847e2-a742-4338-983f-698c13aaefde/fe6847e2-a742-4338-983f-698c13aaefde.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1991.847833] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance 'fe6847e2-a742-4338-983f-698c13aaefde' progress to 50 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1991.862681] env[63024]: DEBUG nova.compute.provider_tree [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1991.901435] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f04596-d08c-ffa7-0ae5-18d59a4fcc68, 'name': SearchDatastore_Task, 'duration_secs': 0.011506} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.901680] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1991.901949] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] ec1f30e6-8410-4687-958f-f4e6e154b52f/ec1f30e6-8410-4687-958f-f4e6e154b52f.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1991.903193] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91adfad1-2b12-4451-9fe6-9d27516df76f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.909962] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Waiting for the task: (returnval){ [ 1991.909962] env[63024]: value = "task-1951527" [ 1991.909962] env[63024]: _type = "Task" [ 1991.909962] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.917938] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951527, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.985334] env[63024]: DEBUG nova.network.neutron [-] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1992.046579] env[63024]: DEBUG nova.network.neutron [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Successfully created port: 1f29e0ef-5e8f-4e17-a724-f9270de55090 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1992.126078] env[63024]: DEBUG nova.compute.manager [req-61959ce7-ba77-4e27-8add-2d3997bc9e9a req-5d31cb51-54f2-4ad2-b66b-5bdf685cef83 service nova] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Received event network-vif-deleted-19edf22a-26aa-401d-bda2-51e972825722 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1992.156234] env[63024]: DEBUG oslo_vmware.api [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951526, 'name': ReconfigVM_Task, 'duration_secs': 0.173045} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.156574] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402210', 'volume_id': '5739f499-39f5-4d5e-8a12-f4f608420e01', 'name': 'volume-5739f499-39f5-4d5e-8a12-f4f608420e01', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ea24d375-ba88-42ca-a07e-52000ec613c0', 'attached_at': '', 'detached_at': '', 'volume_id': '5739f499-39f5-4d5e-8a12-f4f608420e01', 'serial': '5739f499-39f5-4d5e-8a12-f4f608420e01'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1992.354782] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42da8f73-3e12-4417-9147-92945ae3d0c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.380556] env[63024]: DEBUG nova.scheduler.client.report [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1992.384873] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bccce8c0-21d1-4c02-93a7-6e11cbed0579 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.407285] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance 'fe6847e2-a742-4338-983f-698c13aaefde' progress to 67 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1992.420383] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951527, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.488663] env[63024]: INFO nova.compute.manager [-] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Took 1.42 seconds to deallocate network for instance. 
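The records above trace a vmdk volume attach for instance ea24d375-ba88-42ca-a07e-52000ec613c0: volumeops builds a disk-attach spec, invokes VirtualMachine.ReconfigVM_Task (task-1951523, then task-1951526), and oslo_vmware polls each task ("progress is N%." ... "completed successfully") before logging "Attached VMDK" with the connection_info dict shown. The sketch below is only an illustrative reconstruction of that submit-and-poll pattern, not Nova's actual code; the session object, its submit/get_task_info helpers, and the spec layout are assumptions made for the example.

import time

POLL_INTERVAL = 0.5  # seconds between task status polls (illustrative)

def wait_for_task(session, task):
    """Poll a vCenter task until it finishes, mirroring the
    "Task: {'id': task-NNN, ...} progress is N%." lines in the log."""
    while True:
        info = session.get_task_info(task)          # assumed helper, not a real oslo.vmware call
        if info.state == 'success':
            return info.result
        if info.state == 'error':
            raise RuntimeError('Task %s failed: %s' % (task, info.error))
        print('Task %s progress is %s%%.' % (task, info.progress or 0))
        time.sleep(POLL_INTERVAL)

def attach_vmdk(session, vm_ref, connection_info):
    """Attach an existing volume vmdk to a VM via a reconfigure task."""
    data = connection_info['data']                  # e.g. the dict logged above
    vmdk_path = ('[datastore1] volume-%(volume_id)s/volume-%(volume_id)s.vmdk'
                 % data)
    spec = {                                        # minimal device-change spec, thin-provisioned as in the log
        'deviceChange': [{
            'operation': 'add',
            'device': {'backing': {'fileName': vmdk_path,
                                   'thinProvisioned': True}},
        }],
    }
    task = session.submit('ReconfigVM_Task', vm_ref, spec=spec)  # assumed helper
    wait_for_task(session, task)
    return connection_info   # Nova then logs "Attached VMDK: {...}"

Two ReconfigVM_Task rounds appear in the trace (task-1951523 and task-1951526) before the final "Attached VMDK" record; the polling loop above corresponds to the repeated progress lines logged between them.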
[ 1992.755517] env[63024]: DEBUG nova.compute.manager [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1992.786107] env[63024]: DEBUG nova.virt.hardware [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1992.786444] env[63024]: DEBUG nova.virt.hardware [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1992.786622] env[63024]: DEBUG nova.virt.hardware [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1992.786904] env[63024]: DEBUG nova.virt.hardware [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1992.787008] env[63024]: DEBUG nova.virt.hardware [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1992.787191] env[63024]: DEBUG nova.virt.hardware [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1992.787469] env[63024]: DEBUG nova.virt.hardware [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1992.787636] env[63024]: DEBUG nova.virt.hardware [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1992.787882] env[63024]: DEBUG nova.virt.hardware [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1992.788083] env[63024]: DEBUG nova.virt.hardware [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1992.788267] env[63024]: DEBUG nova.virt.hardware [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1992.789370] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfb9963b-ee0e-4de2-8586-34800fa6bb7b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.799784] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d301e752-456f-4810-a85c-76289f84e5c5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.888853] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.155s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1992.891514] env[63024]: DEBUG oslo_concurrency.lockutils [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 21.428s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1992.891861] env[63024]: DEBUG nova.objects.instance [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lazy-loading 'resources' on Instance uuid 12e63b42-5554-44d5-86eb-d592bc0b2ad6 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1992.915988] env[63024]: INFO nova.scheduler.client.report [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Deleted allocations for instance 
839776ef-0562-424d-b301-2aa896f32e14 [ 1992.928591] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951527, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.979982] env[63024]: DEBUG nova.network.neutron [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Port a86b5113-d05e-45ac-bf54-833ea769eae5 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1992.996667] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1993.202731] env[63024]: DEBUG nova.objects.instance [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lazy-loading 'flavor' on Instance uuid ea24d375-ba88-42ca-a07e-52000ec613c0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1993.427058] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951527, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.086851} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.429723] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] ec1f30e6-8410-4687-958f-f4e6e154b52f/ec1f30e6-8410-4687-958f-f4e6e154b52f.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1993.429958] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1993.430683] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d455036d-d8b0-4f37-a400-febca7a607d9 tempest-AttachInterfacesUnderV243Test-1028945639 tempest-AttachInterfacesUnderV243Test-1028945639-project-member] Lock "839776ef-0562-424d-b301-2aa896f32e14" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.390s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.431655] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-447d00f2-bef6-4db3-8880-d593479c8612 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.442510] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Waiting for the task: (returnval){ [ 1993.442510] env[63024]: value = "task-1951528" [ 1993.442510] env[63024]: _type = "Task" [ 1993.442510] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1993.455612] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951528, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1993.710842] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c13b0aaa-76f0-4dc9-843c-614ccef006d8 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.808s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1993.801576] env[63024]: DEBUG nova.network.neutron [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Successfully updated port: 1f29e0ef-5e8f-4e17-a724-f9270de55090 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1993.838889] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a009e8-7d58-40bc-8e51-9ed5f5f8f61b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.848106] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e76876-4160-48e6-b06a-0c1d69a06d47 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.882567] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de4ee05b-7439-4ddf-866b-8fd68f0915e1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.891986] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798896da-8392-42af-a65b-8861e3d7ff2c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.907335] env[63024]: DEBUG nova.compute.provider_tree [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1993.957296] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951528, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078997} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1993.957296] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1993.957296] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ed71d94-fcac-43a8-86da-c81249f74be0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.977224] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] ec1f30e6-8410-4687-958f-f4e6e154b52f/ec1f30e6-8410-4687-958f-f4e6e154b52f.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1993.977521] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-375ba6c4-26fe-412c-888c-9f952e33a4f8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1994.010156] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "fe6847e2-a742-4338-983f-698c13aaefde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1994.010426] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "fe6847e2-a742-4338-983f-698c13aaefde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.010599] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "fe6847e2-a742-4338-983f-698c13aaefde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1994.013238] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Waiting for the task: (returnval){ [ 1994.013238] env[63024]: value = "task-1951529" [ 1994.013238] env[63024]: _type = "Task" [ 1994.013238] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1994.023960] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951529, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.216177] env[63024]: DEBUG nova.compute.manager [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Received event network-vif-plugged-1f29e0ef-5e8f-4e17-a724-f9270de55090 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1994.216409] env[63024]: DEBUG oslo_concurrency.lockutils [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] Acquiring lock "da1f5cbc-47bf-4ee4-837a-b328de170489-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1994.216625] env[63024]: DEBUG oslo_concurrency.lockutils [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] Lock "da1f5cbc-47bf-4ee4-837a-b328de170489-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.216792] env[63024]: DEBUG oslo_concurrency.lockutils [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] Lock "da1f5cbc-47bf-4ee4-837a-b328de170489-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1994.216977] env[63024]: DEBUG nova.compute.manager [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] No waiting events found dispatching network-vif-plugged-1f29e0ef-5e8f-4e17-a724-f9270de55090 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1994.217164] env[63024]: WARNING nova.compute.manager [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Received unexpected event network-vif-plugged-1f29e0ef-5e8f-4e17-a724-f9270de55090 for instance with vm_state building and task_state spawning. [ 1994.217329] env[63024]: DEBUG nova.compute.manager [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Received event network-changed-1f29e0ef-5e8f-4e17-a724-f9270de55090 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1994.217479] env[63024]: DEBUG nova.compute.manager [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Refreshing instance network info cache due to event network-changed-1f29e0ef-5e8f-4e17-a724-f9270de55090. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 1994.217660] env[63024]: DEBUG oslo_concurrency.lockutils [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] Acquiring lock "refresh_cache-da1f5cbc-47bf-4ee4-837a-b328de170489" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.217797] env[63024]: DEBUG oslo_concurrency.lockutils [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] Acquired lock "refresh_cache-da1f5cbc-47bf-4ee4-837a-b328de170489" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1994.217950] env[63024]: DEBUG nova.network.neutron [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Refreshing network info cache for port 1f29e0ef-5e8f-4e17-a724-f9270de55090 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1994.304400] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "refresh_cache-da1f5cbc-47bf-4ee4-837a-b328de170489" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1994.410886] env[63024]: DEBUG nova.scheduler.client.report [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1994.531250] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951529, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1994.755923] env[63024]: DEBUG nova.network.neutron [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1994.850251] env[63024]: DEBUG nova.network.neutron [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1994.916145] env[63024]: DEBUG oslo_concurrency.lockutils [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.025s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1994.923300] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.117s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1994.925424] env[63024]: INFO nova.compute.claims [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1994.954924] env[63024]: INFO nova.scheduler.client.report [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Deleted allocations for instance 12e63b42-5554-44d5-86eb-d592bc0b2ad6 [ 1995.036528] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951529, 'name': ReconfigVM_Task, 'duration_secs': 0.906488} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.036860] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Reconfigured VM instance instance-0000005d to attach disk [datastore1] ec1f30e6-8410-4687-958f-f4e6e154b52f/ec1f30e6-8410-4687-958f-f4e6e154b52f.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1995.037501] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6b49fd14-8726-4fa9-878b-9c1ebe7df672 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.047946] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Waiting for the task: (returnval){ [ 1995.047946] env[63024]: value = "task-1951530" [ 1995.047946] env[63024]: _type = "Task" [ 1995.047946] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.059528] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951530, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.094499] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1995.094499] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.095149] env[63024]: DEBUG nova.network.neutron [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1995.354842] env[63024]: DEBUG oslo_concurrency.lockutils [req-e78aedae-8e15-4e3a-b7f3-b862f40d4856 req-09734507-0fb3-4574-845a-185a1ac9ab7b service nova] Releasing lock "refresh_cache-da1f5cbc-47bf-4ee4-837a-b328de170489" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1995.354842] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquired lock "refresh_cache-da1f5cbc-47bf-4ee4-837a-b328de170489" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1995.354842] env[63024]: DEBUG nova.network.neutron [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1995.463895] env[63024]: DEBUG oslo_concurrency.lockutils [None req-521cf133-4c71-48ce-842b-94220fcaa5db tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "12e63b42-5554-44d5-86eb-d592bc0b2ad6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 28.028s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1995.561108] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951530, 'name': Rename_Task, 'duration_secs': 0.153236} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1995.561396] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1995.561715] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-716a70ca-bf66-4e30-a2fc-fc090e61abee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1995.571210] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Waiting for the task: (returnval){ [ 1995.571210] env[63024]: value = "task-1951531" [ 1995.571210] env[63024]: _type = "Task" [ 1995.571210] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1995.584573] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951531, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1995.931152] env[63024]: DEBUG nova.network.neutron [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1996.046188] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "3cf2726c-2551-4bbd-8032-006062cdcc39" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1996.046442] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "3cf2726c-2551-4bbd-8032-006062cdcc39" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1996.088879] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951531, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.248431] env[63024]: DEBUG nova.network.neutron [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance_info_cache with network_info: [{"id": "a86b5113-d05e-45ac-bf54-833ea769eae5", "address": "fa:16:3e:7e:0d:a2", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa86b5113-d0", "ovs_interfaceid": "a86b5113-d05e-45ac-bf54-833ea769eae5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.334065] env[63024]: DEBUG nova.network.neutron [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Updating instance_info_cache with network_info: [{"id": "1f29e0ef-5e8f-4e17-a724-f9270de55090", "address": "fa:16:3e:89:b7:fe", "network": {"id": "2f5cbe03-419b-4995-837c-3389d94c2be3", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1864944898-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7fc70d467714e59b3c171a308feafdf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f29e0ef-5e", "ovs_interfaceid": "1f29e0ef-5e8f-4e17-a724-f9270de55090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1996.413975] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a16c5381-d72d-48e7-8d2d-38f50422593a {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.426277] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bceef2-b541-4782-bb80-21149dbabfa8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.467839] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074ceb17-3081-42e9-8b96-ef20e7d0760e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.477237] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-268a0093-1ab2-4ebf-8e52-675bbaeea26b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.493976] env[63024]: DEBUG nova.compute.provider_tree [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1996.548549] env[63024]: DEBUG nova.compute.manager [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1996.583361] env[63024]: DEBUG oslo_vmware.api [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951531, 'name': PowerOnVM_Task, 'duration_secs': 0.654743} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1996.583361] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1996.583361] env[63024]: INFO nova.compute.manager [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Took 6.43 seconds to spawn the instance on the hypervisor. 
[ 1996.583547] env[63024]: DEBUG nova.compute.manager [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1996.584394] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c98f99-1606-40a8-a7dc-f3652cbfc2aa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.751542] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.837984] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Releasing lock "refresh_cache-da1f5cbc-47bf-4ee4-837a-b328de170489" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1996.838448] env[63024]: DEBUG nova.compute.manager [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Instance network_info: |[{"id": "1f29e0ef-5e8f-4e17-a724-f9270de55090", "address": "fa:16:3e:89:b7:fe", "network": {"id": "2f5cbe03-419b-4995-837c-3389d94c2be3", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1864944898-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7fc70d467714e59b3c171a308feafdf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f29e0ef-5e", "ovs_interfaceid": "1f29e0ef-5e8f-4e17-a724-f9270de55090", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1996.839179] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:89:b7:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0d2101e-2d93-4310-a242-af2d9ecdaf9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f29e0ef-5e8f-4e17-a724-f9270de55090', 'vif_model': 'vmxnet3'}] 
{{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1996.846705] env[63024]: DEBUG oslo.service.loopingcall [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1996.846945] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1996.847191] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4d01fd3a-f931-4f70-9f9b-db16609a2a29 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1996.869140] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1996.869140] env[63024]: value = "task-1951532" [ 1996.869140] env[63024]: _type = "Task" [ 1996.869140] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1996.878568] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951532, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1996.997027] env[63024]: DEBUG nova.scheduler.client.report [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1997.079259] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.104157] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "c71abfaa-dc65-4d1b-8a34-dff9dd682fe7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.104425] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "c71abfaa-dc65-4d1b-8a34-dff9dd682fe7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.105558] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "c71abfaa-dc65-4d1b-8a34-dff9dd682fe7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.105558] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "c71abfaa-dc65-4d1b-8a34-dff9dd682fe7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.105558] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "c71abfaa-dc65-4d1b-8a34-dff9dd682fe7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.109712] env[63024]: INFO nova.compute.manager [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Terminating instance [ 1997.111080] env[63024]: INFO nova.compute.manager [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Took 31.65 seconds to build instance. 
[ 1997.168970] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "6d21976b-f519-4c87-a0d2-0a406060608d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.169295] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "6d21976b-f519-4c87-a0d2-0a406060608d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.169503] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "6d21976b-f519-4c87-a0d2-0a406060608d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.169692] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "6d21976b-f519-4c87-a0d2-0a406060608d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.169862] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "6d21976b-f519-4c87-a0d2-0a406060608d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.172260] env[63024]: INFO nova.compute.manager [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Terminating instance [ 1997.299829] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7eeda1d-b2b9-411c-a999-837ca2881163 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.324116] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d10433d5-822f-45fc-958b-93c5e164613e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.337165] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance 'fe6847e2-a742-4338-983f-698c13aaefde' progress to 83 {{(pid=63024) 
_update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1997.382879] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951532, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.503020] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.580s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.503669] env[63024]: DEBUG nova.compute.manager [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1997.506270] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.388s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.507806] env[63024]: INFO nova.compute.claims [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1997.613449] env[63024]: DEBUG oslo_concurrency.lockutils [None req-373b3010-8f93-47b4-a9c0-dd422e16593d tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Lock "ec1f30e6-8410-4687-958f-f4e6e154b52f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.157s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.614191] env[63024]: DEBUG nova.compute.manager [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1997.614306] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1997.615525] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0d5d837-14f9-465d-8561-5b388e68b568 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.626814] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1997.627284] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de57564c-8f7e-4927-8ca8-5b7836d51244 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.635405] env[63024]: DEBUG oslo_vmware.api [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1997.635405] env[63024]: value = "task-1951533" [ 1997.635405] env[63024]: _type = "Task" [ 1997.635405] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.645959] env[63024]: DEBUG oslo_vmware.api [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951533, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.679026] env[63024]: DEBUG nova.compute.manager [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1997.679026] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1997.679026] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d031a03-26d7-4d1d-b300-b5c859a56164 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.688915] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1997.688915] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2ccb6c5d-8420-41f4-a94e-94cc0b41707f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.699659] env[63024]: DEBUG oslo_vmware.api [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1997.699659] env[63024]: value = "task-1951534" [ 1997.699659] env[63024]: _type = "Task" [ 1997.699659] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1997.708844] env[63024]: DEBUG oslo_vmware.api [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951534, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.831783] env[63024]: DEBUG nova.compute.manager [None req-9c7d45e3-974c-4d2e-97ba-c6840a19ae55 tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1997.832991] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9cde8df-3928-47f0-ad43-07f7ecb46796 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1997.840669] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.840924] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.846411] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8faacc2f-902e-4b11-9b17-8acd51a14001 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance 'fe6847e2-a742-4338-983f-698c13aaefde' progress to 100 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1997.887963] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951532, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1997.899629] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Acquiring lock "ec1f30e6-8410-4687-958f-f4e6e154b52f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.899996] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Lock "ec1f30e6-8410-4687-958f-f4e6e154b52f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.900281] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Acquiring lock "ec1f30e6-8410-4687-958f-f4e6e154b52f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1997.900521] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Lock "ec1f30e6-8410-4687-958f-f4e6e154b52f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1997.900772] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Lock "ec1f30e6-8410-4687-958f-f4e6e154b52f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1997.903477] env[63024]: INFO nova.compute.manager [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Terminating instance [ 1998.018045] env[63024]: DEBUG nova.compute.utils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1998.018045] env[63024]: DEBUG nova.compute.manager [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1998.018045] env[63024]: DEBUG nova.network.neutron [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1998.072252] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1998.072479] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1998.132492] env[63024]: DEBUG nova.policy [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f12eeccbe5da4cc288e77efe5e8569a1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a01553100a8f4078bb57a8149179d5d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 1998.146913] env[63024]: DEBUG oslo_vmware.api [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951533, 'name': PowerOffVM_Task, 'duration_secs': 0.223712} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.146913] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1998.146913] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1998.146913] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b3fc6b8e-f961-4434-98f4-16fc27fe23fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.210789] env[63024]: DEBUG oslo_vmware.api [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951534, 'name': PowerOffVM_Task, 'duration_secs': 0.173007} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.211246] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1998.211546] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1998.211911] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d743908c-6981-4d68-a1a4-6146e0862fbb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.254132] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1998.254132] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1998.254132] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Deleting the datastore file [datastore1] c71abfaa-dc65-4d1b-8a34-dff9dd682fe7 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1998.254132] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-de36cc89-586f-41eb-9dbe-e4be3e9368c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.261393] env[63024]: DEBUG oslo_vmware.api [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1998.261393] env[63024]: value = "task-1951537" [ 1998.261393] env[63024]: _type = "Task" [ 1998.261393] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.270105] env[63024]: DEBUG oslo_vmware.api [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951537, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.347068] env[63024]: INFO nova.compute.manager [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Detaching volume 2cfc46b0-10e9-4f4c-8f58-7fff36954695 [ 1998.354631] env[63024]: INFO nova.compute.manager [None req-9c7d45e3-974c-4d2e-97ba-c6840a19ae55 tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] instance snapshotting [ 1998.355465] env[63024]: DEBUG nova.objects.instance [None req-9c7d45e3-974c-4d2e-97ba-c6840a19ae55 tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Lazy-loading 'flavor' on Instance uuid ec1f30e6-8410-4687-958f-f4e6e154b52f {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1998.383858] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1998.384414] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1998.384770] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Deleting the datastore file [datastore1] 6d21976b-f519-4c87-a0d2-0a406060608d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1998.388518] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-923021e9-d02f-4726-953d-14739814155b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.391091] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951532, 'name': CreateVM_Task, 'duration_secs': 1.124805} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.391366] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1998.392561] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1998.392876] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1998.393312] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1998.393710] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc390762-9e9c-42ef-8e05-e04d9d84ff9d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.400017] env[63024]: DEBUG oslo_vmware.api [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for the task: (returnval){ [ 1998.400017] env[63024]: value = "task-1951538" [ 1998.400017] env[63024]: _type = "Task" [ 1998.400017] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.400017] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1998.400017] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522345d3-150a-c3e7-ae6c-8ab9d95090f1" [ 1998.400017] env[63024]: _type = "Task" [ 1998.400017] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.405427] env[63024]: INFO nova.virt.block_device [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Attempting to driver detach volume 2cfc46b0-10e9-4f4c-8f58-7fff36954695 from mountpoint /dev/sdb [ 1998.405806] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1998.406163] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402188', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'name': 'volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c28e7c21-7e7d-4cda-81e8-63538bd8a1f7', 'attached_at': '', 'detached_at': '', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'serial': '2cfc46b0-10e9-4f4c-8f58-7fff36954695'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1998.410272] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8981faea-b3bd-401b-880c-f432d28c614b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.414038] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Acquiring lock "refresh_cache-ec1f30e6-8410-4687-958f-f4e6e154b52f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1998.414400] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Acquired lock "refresh_cache-ec1f30e6-8410-4687-958f-f4e6e154b52f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1998.414725] env[63024]: DEBUG nova.network.neutron [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1998.420278] env[63024]: DEBUG oslo_vmware.api [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951538, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.425576] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522345d3-150a-c3e7-ae6c-8ab9d95090f1, 'name': SearchDatastore_Task} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.443922] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1998.444578] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1998.445086] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1998.445401] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1998.445729] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1998.446715] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-80eddf6f-e618-4b6b-8f3d-51470eb358a5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.451661] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ffafdbc-c832-45d0-8dfb-761701131b0c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.463072] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612f5f52-7a97-4a58-b9bf-31f8fd7dda79 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.469238] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1998.469679] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 
tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1998.472088] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e339c800-15d9-49fb-ac39-0906ee5169f9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.493531] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9673268-82a1-4d7d-85fc-106757ba47c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.498041] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1998.498041] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523fd569-63ba-e49d-7bae-cbb69e7241b6" [ 1998.498041] env[63024]: _type = "Task" [ 1998.498041] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.513150] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] The volume has not been displaced from its original location: [datastore1] volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695/volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695.vmdk. No consolidation needed. {{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1998.520563] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Reconfiguring VM instance instance-0000004d to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1998.524644] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0098c663-ab02-4c0d-b313-5ddca0456417 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.545390] env[63024]: DEBUG nova.compute.manager [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1998.547031] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523fd569-63ba-e49d-7bae-cbb69e7241b6, 'name': SearchDatastore_Task, 'duration_secs': 0.012328} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.548383] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-baf89f6c-8d05-4846-85e1-9dae61d51114 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.555210] env[63024]: DEBUG oslo_vmware.api [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1998.555210] env[63024]: value = "task-1951539" [ 1998.555210] env[63024]: _type = "Task" [ 1998.555210] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.556940] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1998.556940] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52375eb3-ebdb-3f08-cfc1-9c9e8d15d0ef" [ 1998.556940] env[63024]: _type = "Task" [ 1998.556940] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.576634] env[63024]: DEBUG oslo_vmware.api [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951539, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.582474] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52375eb3-ebdb-3f08-cfc1-9c9e8d15d0ef, 'name': SearchDatastore_Task, 'duration_secs': 0.010885} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.586802] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1998.586802] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] da1f5cbc-47bf-4ee4-837a-b328de170489/da1f5cbc-47bf-4ee4-837a-b328de170489.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1998.587141] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ff9b3cc7-794c-431b-a9fb-7ea7652c9f24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.589446] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1998.589617] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 1998.589811] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 1998.600048] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1998.600048] env[63024]: value = "task-1951540" [ 1998.600048] env[63024]: _type = "Task" [ 1998.600048] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1998.610266] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951540, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1998.774434] env[63024]: DEBUG oslo_vmware.api [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951537, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.269894} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.775289] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1998.778973] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1998.778973] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1998.778973] env[63024]: INFO nova.compute.manager [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1998.778973] env[63024]: DEBUG oslo.service.loopingcall [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1998.778973] env[63024]: DEBUG nova.compute.manager [-] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1998.778973] env[63024]: DEBUG nova.network.neutron [-] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1998.879157] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b164bf41-4b05-4bc4-a4d8-5af39f1960f6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.899493] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa47b856-425a-4883-bfa6-28d7b4e70dac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1998.920441] env[63024]: DEBUG oslo_vmware.api [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Task: {'id': task-1951538, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192368} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1998.923743] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1998.923855] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1998.924080] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1998.924287] env[63024]: INFO nova.compute.manager [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Took 1.25 seconds to destroy the instance on the hypervisor. [ 1998.924560] env[63024]: DEBUG oslo.service.loopingcall [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1998.927097] env[63024]: DEBUG nova.compute.manager [-] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1998.927209] env[63024]: DEBUG nova.network.neutron [-] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1999.012128] env[63024]: DEBUG nova.network.neutron [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1999.064584] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da3de12-4a89-49a5-80b3-174bf78eed02 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.075248] env[63024]: DEBUG oslo_vmware.api [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951539, 'name': ReconfigVM_Task, 'duration_secs': 0.287808} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.077622] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Reconfigured VM instance instance-0000004d to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1999.083231] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a90be829-fc02-42e6-b40e-72fda058b82d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.095096] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7af309c-df2d-4c7c-b8eb-2cb104650932 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.104028] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Skipping network cache update for instance because it is being deleted. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10275}} [ 1999.104480] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Skipping network cache update for instance because it is being deleted. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10275}} [ 1999.104693] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Skipping network cache update for instance because it is being deleted. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10275}} [ 1999.104944] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Skipping network cache update for instance because it is Building. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10271}} [ 1999.105201] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Skipping network cache update for instance because it is Building. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10271}} [ 1999.105201] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Skipping network cache update for instance because it is Building. 
{{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10271}} [ 1999.115045] env[63024]: DEBUG nova.network.neutron [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1999.118925] env[63024]: DEBUG oslo_vmware.api [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 1999.118925] env[63024]: value = "task-1951541" [ 1999.118925] env[63024]: _type = "Task" [ 1999.118925] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.154463] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951540, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.155719] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1c4188-5e28-4c79-a7b4-426ab9461fc7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.163142] env[63024]: DEBUG oslo_vmware.api [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951541, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.169414] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06293013-49de-4771-8547-669ed8254acc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.185157] env[63024]: DEBUG nova.compute.provider_tree [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1999.248496] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1999.248496] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquired lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1999.248877] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Forcefully refreshing network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 1999.248877] env[63024]: DEBUG nova.objects.instance [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lazy-loading 'info_cache' on Instance uuid 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1999.264648] env[63024]: DEBUG nova.network.neutron [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Successfully created port: ba67287f-e2da-402f-bab6-b8d2b9226aff {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1999.423990] env[63024]: DEBUG nova.compute.manager [None req-9c7d45e3-974c-4d2e-97ba-c6840a19ae55 tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Instance disappeared during snapshot {{(pid=63024) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4581}} [ 1999.560130] env[63024]: DEBUG nova.compute.manager [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1999.577490] env[63024]: DEBUG nova.compute.manager [None req-9c7d45e3-974c-4d2e-97ba-c6840a19ae55 tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Found 0 images (rotation: 2) {{(pid=63024) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4884}} [ 1999.599019] env[63024]: DEBUG nova.virt.hardware [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1999.599019] env[63024]: DEBUG nova.virt.hardware [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1999.599019] env[63024]: DEBUG nova.virt.hardware [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1999.599019] env[63024]: DEBUG nova.virt.hardware [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1999.599019] env[63024]: DEBUG nova.virt.hardware [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1999.599019] env[63024]: DEBUG nova.virt.hardware [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1999.599019] env[63024]: DEBUG nova.virt.hardware [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1999.599019] env[63024]: DEBUG nova.virt.hardware [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1999.599019] env[63024]: DEBUG nova.virt.hardware [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1999.599019] env[63024]: DEBUG nova.virt.hardware [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1999.599019] env[63024]: DEBUG nova.virt.hardware [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1999.599019] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e10d9d4f-7cea-42fc-9799-2ad0899c0dfa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.611916] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea3f7f92-ea7a-4c17-8139-084bbef2df23 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.618268] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Releasing lock "refresh_cache-ec1f30e6-8410-4687-958f-f4e6e154b52f" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1999.618946] env[63024]: DEBUG nova.compute.manager [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1999.618946] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1999.630383] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a500b61-53d1-49c7-bffb-94de50241f31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.634034] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951540, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.565109} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.637138] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] da1f5cbc-47bf-4ee4-837a-b328de170489/da1f5cbc-47bf-4ee4-837a-b328de170489.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1999.637249] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1999.638278] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-845724b4-81dd-405b-a2d7-da1fc4f76425 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.642930] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1999.645904] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e009e773-8a19-404d-8987-947a2b0f272c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1999.647425] env[63024]: DEBUG oslo_vmware.api [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951541, 'name': ReconfigVM_Task, 'duration_secs': 0.159855} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1999.648626] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402188', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'name': 'volume-2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'c28e7c21-7e7d-4cda-81e8-63538bd8a1f7', 'attached_at': '', 'detached_at': '', 'volume_id': '2cfc46b0-10e9-4f4c-8f58-7fff36954695', 'serial': '2cfc46b0-10e9-4f4c-8f58-7fff36954695'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1999.650804] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 1999.650804] env[63024]: value = "task-1951542" [ 1999.650804] env[63024]: _type = "Task" [ 1999.650804] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.657711] env[63024]: DEBUG oslo_vmware.api [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Waiting for the task: (returnval){ [ 1999.657711] env[63024]: value = "task-1951543" [ 1999.657711] env[63024]: _type = "Task" [ 1999.657711] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1999.663861] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951542, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.671917] env[63024]: DEBUG oslo_vmware.api [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951543, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1999.689030] env[63024]: DEBUG nova.scheduler.client.report [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 1999.830056] env[63024]: DEBUG nova.compute.manager [req-dccced46-ba77-4c6c-84da-d8d5b32d4bea req-dafeb72d-8389-49f2-8754-e34e82758c78 service nova] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Received event network-vif-deleted-71970f6a-12a4-4779-832b-7b00ff046697 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 1999.830349] env[63024]: INFO nova.compute.manager [req-dccced46-ba77-4c6c-84da-d8d5b32d4bea req-dafeb72d-8389-49f2-8754-e34e82758c78 service nova] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Neutron deleted interface 71970f6a-12a4-4779-832b-7b00ff046697; detaching it from the instance and deleting it from the info cache [ 1999.833037] env[63024]: DEBUG nova.network.neutron [req-dccced46-ba77-4c6c-84da-d8d5b32d4bea req-dafeb72d-8389-49f2-8754-e34e82758c78 service nova] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2000.166305] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951542, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081677} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.169287] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2000.169600] env[63024]: DEBUG oslo_vmware.api [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951543, 'name': PowerOffVM_Task, 'duration_secs': 0.147177} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.170393] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1093f972-2439-435f-9a8e-b7d42369e6e7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.173755] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2000.173755] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2000.173755] env[63024]: DEBUG nova.network.neutron [-] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2000.174619] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f85d53aa-5932-4898-b0df-5b6c65bbed6f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.200537] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] da1f5cbc-47bf-4ee4-837a-b328de170489/da1f5cbc-47bf-4ee4-837a-b328de170489.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2000.203257] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.697s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2000.203848] env[63024]: DEBUG nova.compute.manager [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2000.206749] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4bce0b54-b48f-4241-ad29-753430d6d0fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.221932] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2000.223044] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2000.223044] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Deleting the datastore file [datastore1] ec1f30e6-8410-4687-958f-f4e6e154b52f {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2000.223708] env[63024]: DEBUG nova.objects.instance [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lazy-loading 'flavor' on Instance uuid c28e7c21-7e7d-4cda-81e8-63538bd8a1f7 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2000.225743] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.251s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.227230] env[63024]: INFO nova.compute.claims [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2000.230525] env[63024]: DEBUG nova.network.neutron [-] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2000.232999] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-66b9cbec-44ea-4731-a675-9f5b436c64d1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.241969] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2000.241969] env[63024]: value = "task-1951546" [ 2000.241969] env[63024]: _type = "Task" [ 2000.241969] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.243600] env[63024]: DEBUG oslo_vmware.api [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Waiting for the task: (returnval){ [ 2000.243600] env[63024]: value = "task-1951545" [ 2000.243600] env[63024]: _type = "Task" [ 2000.243600] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.260646] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951546, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.264634] env[63024]: DEBUG oslo_vmware.api [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951545, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.311322] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "fe6847e2-a742-4338-983f-698c13aaefde" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2000.311604] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "fe6847e2-a742-4338-983f-698c13aaefde" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2000.311820] env[63024]: DEBUG nova.compute.manager [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Going to confirm migration 5 {{(pid=63024) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5113}} [ 2000.335689] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-97917f89-cea0-42dc-b13e-c1d0c4aa2a8b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.347369] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81de393-7eb7-4d6e-8f9f-c6a530e8c616 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.384558] env[63024]: DEBUG nova.compute.manager [req-dccced46-ba77-4c6c-84da-d8d5b32d4bea req-dafeb72d-8389-49f2-8754-e34e82758c78 service nova] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Detach interface failed, port_id=71970f6a-12a4-4779-832b-7b00ff046697, reason: Instance c71abfaa-dc65-4d1b-8a34-dff9dd682fe7 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2000.677595] env[63024]: INFO nova.compute.manager [-] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Took 1.90 seconds to deallocate network for instance. [ 2000.728547] env[63024]: DEBUG nova.compute.utils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2000.730956] env[63024]: DEBUG nova.compute.manager [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2000.731368] env[63024]: DEBUG nova.network.neutron [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2000.734022] env[63024]: INFO nova.compute.manager [-] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Took 1.81 seconds to deallocate network for instance. [ 2000.755984] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951546, 'name': ReconfigVM_Task, 'duration_secs': 0.365231} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.756705] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Reconfigured VM instance instance-0000005e to attach disk [datastore1] da1f5cbc-47bf-4ee4-837a-b328de170489/da1f5cbc-47bf-4ee4-837a-b328de170489.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2000.757355] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c26efeb-3071-4e07-b62d-06d28a33b64d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2000.762054] env[63024]: DEBUG oslo_vmware.api [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Task: {'id': task-1951545, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141735} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2000.762652] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2000.762850] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2000.763127] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2000.763242] env[63024]: INFO nova.compute.manager [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2000.763488] env[63024]: DEBUG oslo.service.loopingcall [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2000.763677] env[63024]: DEBUG nova.compute.manager [-] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2000.763772] env[63024]: DEBUG nova.network.neutron [-] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2000.768573] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2000.768573] env[63024]: value = "task-1951547" [ 2000.768573] env[63024]: _type = "Task" [ 2000.768573] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2000.778752] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951547, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2000.783726] env[63024]: DEBUG nova.network.neutron [-] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2000.798521] env[63024]: DEBUG nova.policy [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7c32db2d81e40c492c1362d8356a03c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93098ad83ae144bf90a12c97ec863c06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2000.899784] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2000.899968] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2000.900227] env[63024]: DEBUG nova.network.neutron [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2000.900437] env[63024]: DEBUG nova.objects.instance [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'info_cache' on Instance uuid fe6847e2-a742-4338-983f-698c13aaefde {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2001.159089] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Acquiring lock "df5a197c-8e35-44a0-8b9c-63dae50b77ff" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.160398] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Lock "df5a197c-8e35-44a0-8b9c-63dae50b77ff" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.187064] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "compute_resources" by 
"nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.238018] env[63024]: DEBUG nova.compute.manager [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2001.238018] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0f50873d-b33f-4b16-a72f-34251c73f067 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.397s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.246276] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.285946] env[63024]: DEBUG nova.network.neutron [-] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2001.287546] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951547, 'name': Rename_Task, 'duration_secs': 0.163969} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.292281] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2001.292993] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f453ebf2-35e9-4e8c-aed8-ecba7e94307a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.306161] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2001.306161] env[63024]: value = "task-1951548" [ 2001.306161] env[63024]: _type = "Task" [ 2001.306161] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2001.316320] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951548, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2001.461369] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance_info_cache with network_info: [{"id": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "address": "fa:16:3e:2b:cc:65", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e0e9732-b3", "ovs_interfaceid": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2001.542233] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.549743] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.549743] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2001.549743] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2001.549743] env[63024]: DEBUG 
oslo_concurrency.lockutils [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2001.550147] env[63024]: INFO nova.compute.manager [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Terminating instance [ 2001.663500] env[63024]: DEBUG nova.compute.manager [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2001.669569] env[63024]: DEBUG nova.network.neutron [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Successfully created port: 0bf9200e-b614-45e7-9926-4e6db7134da3 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2001.689451] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c5332cc-1f68-4cd3-90bd-5066cf2a4c7a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.697968] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1682908-c415-4cd2-8ea4-b09e28a7a0bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.730899] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a88ea8-412c-4ef8-97c3-b7e64d8004de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.744543] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-377c0d70-56ca-4959-9c12-673bc9dc819a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.762878] env[63024]: DEBUG nova.compute.provider_tree [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2001.791968] env[63024]: INFO nova.compute.manager [-] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Took 1.03 seconds to deallocate network for instance. [ 2001.822719] env[63024]: DEBUG oslo_vmware.api [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951548, 'name': PowerOnVM_Task, 'duration_secs': 0.51418} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2001.822993] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2001.823223] env[63024]: INFO nova.compute.manager [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Took 9.07 seconds to spawn the instance on the hypervisor. [ 2001.823423] env[63024]: DEBUG nova.compute.manager [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2001.824256] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8408989c-d7e6-47e1-9526-e66dc39d98ff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2001.871898] env[63024]: DEBUG nova.network.neutron [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Successfully updated port: ba67287f-e2da-402f-bab6-b8d2b9226aff {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2001.964414] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Releasing lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2001.964594] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updated the network info_cache for instance {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10329}} [ 2001.964836] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.965027] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.965185] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.965367] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.965530] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.965682] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2001.965812] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 2001.965953] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2002.019466] env[63024]: DEBUG nova.compute.manager [req-0f2d7088-e1aa-41f6-81d1-42324baf8cf5 req-d906aa51-fe3e-4a1f-8021-fbc5430559c5 service nova] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Received event network-vif-deleted-119df22d-0c91-429b-8927-6e0b9a7412f5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2002.019750] env[63024]: DEBUG nova.compute.manager [req-0f2d7088-e1aa-41f6-81d1-42324baf8cf5 req-d906aa51-fe3e-4a1f-8021-fbc5430559c5 service nova] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Received event network-vif-plugged-ba67287f-e2da-402f-bab6-b8d2b9226aff {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2002.019915] env[63024]: DEBUG oslo_concurrency.lockutils [req-0f2d7088-e1aa-41f6-81d1-42324baf8cf5 req-d906aa51-fe3e-4a1f-8021-fbc5430559c5 service nova] Acquiring lock "a694e49c-37c5-483f-b1d8-5426f6a52b73-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.020171] env[63024]: DEBUG oslo_concurrency.lockutils [req-0f2d7088-e1aa-41f6-81d1-42324baf8cf5 req-d906aa51-fe3e-4a1f-8021-fbc5430559c5 service nova] Lock "a694e49c-37c5-483f-b1d8-5426f6a52b73-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.020379] env[63024]: DEBUG oslo_concurrency.lockutils [req-0f2d7088-e1aa-41f6-81d1-42324baf8cf5 req-d906aa51-fe3e-4a1f-8021-fbc5430559c5 service nova] Lock "a694e49c-37c5-483f-b1d8-5426f6a52b73-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.020552] env[63024]: DEBUG nova.compute.manager [req-0f2d7088-e1aa-41f6-81d1-42324baf8cf5 req-d906aa51-fe3e-4a1f-8021-fbc5430559c5 service nova] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] No waiting events found dispatching network-vif-plugged-ba67287f-e2da-402f-bab6-b8d2b9226aff {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2002.020716] 
env[63024]: WARNING nova.compute.manager [req-0f2d7088-e1aa-41f6-81d1-42324baf8cf5 req-d906aa51-fe3e-4a1f-8021-fbc5430559c5 service nova] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Received unexpected event network-vif-plugged-ba67287f-e2da-402f-bab6-b8d2b9226aff for instance with vm_state building and task_state spawning. [ 2002.053945] env[63024]: DEBUG nova.compute.manager [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2002.054184] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2002.055171] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbc6d6b-7a18-4d96-8f7f-13ea13b27dc8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.063692] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2002.063933] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b92430b-ae79-405a-a757-93f802f0a76e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.071047] env[63024]: DEBUG oslo_vmware.api [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2002.071047] env[63024]: value = "task-1951549" [ 2002.071047] env[63024]: _type = "Task" [ 2002.071047] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.079383] env[63024]: DEBUG oslo_vmware.api [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951549, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.190332] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.251308] env[63024]: DEBUG nova.compute.manager [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2002.269964] env[63024]: DEBUG nova.scheduler.client.report [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2002.289691] env[63024]: DEBUG nova.virt.hardware [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2002.289691] env[63024]: DEBUG nova.virt.hardware [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2002.289691] env[63024]: DEBUG nova.virt.hardware [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2002.289691] env[63024]: DEBUG nova.virt.hardware [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2002.289691] env[63024]: DEBUG nova.virt.hardware [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2002.289901] env[63024]: DEBUG nova.virt.hardware [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2002.290047] env[63024]: DEBUG nova.virt.hardware [None 
req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2002.290429] env[63024]: DEBUG nova.virt.hardware [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2002.290636] env[63024]: DEBUG nova.virt.hardware [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2002.290841] env[63024]: DEBUG nova.virt.hardware [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2002.291461] env[63024]: DEBUG nova.virt.hardware [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2002.291943] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0633e85-114a-4781-9a75-1e7641e82b35 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.298469] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.302288] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2f24bf-c23d-4810-8efd-37241deec8a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.346037] env[63024]: INFO nova.compute.manager [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Took 36.23 seconds to build instance. 
[ 2002.374477] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Acquiring lock "refresh_cache-a694e49c-37c5-483f-b1d8-5426f6a52b73" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2002.374631] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Acquired lock "refresh_cache-a694e49c-37c5-483f-b1d8-5426f6a52b73" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2002.374778] env[63024]: DEBUG nova.network.neutron [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2002.449023] env[63024]: DEBUG nova.network.neutron [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance_info_cache with network_info: [{"id": "a86b5113-d05e-45ac-bf54-833ea769eae5", "address": "fa:16:3e:7e:0d:a2", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa86b5113-d0", "ovs_interfaceid": "a86b5113-d05e-45ac-bf54-833ea769eae5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2002.469117] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2002.581914] env[63024]: DEBUG oslo_vmware.api [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951549, 'name': PowerOffVM_Task, 'duration_secs': 0.259436} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2002.581914] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2002.582272] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2002.582513] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6d747756-9d19-4e85-991f-a1a3e1970920 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.675398] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2002.675662] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2002.675866] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleting the datastore file [datastore1] c28e7c21-7e7d-4cda-81e8-63538bd8a1f7 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2002.676218] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67fb16d4-052c-43d8-9994-7c9d10c5aef3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2002.684739] env[63024]: DEBUG oslo_vmware.api [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2002.684739] env[63024]: value = "task-1951551" [ 2002.684739] env[63024]: _type = "Task" [ 2002.684739] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2002.693265] env[63024]: DEBUG oslo_vmware.api [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951551, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2002.779112] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.553s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.779689] env[63024]: DEBUG nova.compute.manager [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2002.782533] env[63024]: DEBUG oslo_concurrency.lockutils [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.939s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2002.782756] env[63024]: DEBUG nova.objects.instance [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lazy-loading 'resources' on Instance uuid 0f371c69-c7ae-4649-b038-be82e8ca74e1 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2002.848829] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9d2c85c8-3059-4927-b492-aab6d30a3485 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "da1f5cbc-47bf-4ee4-837a-b328de170489" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.742s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2002.938854] env[63024]: DEBUG nova.network.neutron [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2002.954651] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2002.954927] env[63024]: DEBUG nova.objects.instance [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'migration_context' on Instance uuid fe6847e2-a742-4338-983f-698c13aaefde {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2003.179478] env[63024]: DEBUG nova.network.neutron [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Updating instance_info_cache with network_info: [{"id": "ba67287f-e2da-402f-bab6-b8d2b9226aff", "address": "fa:16:3e:b0:fa:b0", "network": {"id": "fb9fab3c-39c4-43b4-ac95-91a662612a7e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1064954217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a01553100a8f4078bb57a8149179d5d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba67287f-e2", "ovs_interfaceid": "ba67287f-e2da-402f-bab6-b8d2b9226aff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2003.196298] env[63024]: DEBUG oslo_vmware.api [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951551, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15366} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.196619] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2003.196735] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2003.196919] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2003.197121] env[63024]: INFO nova.compute.manager [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Took 1.14 seconds to destroy the instance on the hypervisor. [ 2003.197954] env[63024]: DEBUG oslo.service.loopingcall [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2003.197954] env[63024]: DEBUG nova.compute.manager [-] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2003.197954] env[63024]: DEBUG nova.network.neutron [-] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2003.286262] env[63024]: DEBUG nova.compute.utils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2003.292230] env[63024]: DEBUG nova.compute.manager [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2003.292230] env[63024]: DEBUG nova.network.neutron [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2003.418804] env[63024]: DEBUG nova.policy [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fc84a6eed984429b26a693ce7b0876e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9521048e807c4ca2a6d2e74a72b829a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2003.458058] env[63024]: DEBUG nova.objects.base [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2003.459067] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb5cbd8-56ab-408c-bea9-9a818e85a39a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.494410] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6965f9cc-be92-459e-9a29-c9d40ec78514 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.501931] env[63024]: DEBUG oslo_vmware.api [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2003.501931] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e46357-ef52-23f1-a38b-a4d8e78fe521" [ 2003.501931] env[63024]: _type = "Task" [ 2003.501931] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.515536] env[63024]: DEBUG oslo_vmware.api [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e46357-ef52-23f1-a38b-a4d8e78fe521, 'name': SearchDatastore_Task, 'duration_secs': 0.008924} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2003.515862] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2003.683434] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Releasing lock "refresh_cache-a694e49c-37c5-483f-b1d8-5426f6a52b73" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2003.683782] env[63024]: DEBUG nova.compute.manager [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Instance network_info: |[{"id": "ba67287f-e2da-402f-bab6-b8d2b9226aff", "address": "fa:16:3e:b0:fa:b0", "network": {"id": "fb9fab3c-39c4-43b4-ac95-91a662612a7e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1064954217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a01553100a8f4078bb57a8149179d5d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba67287f-e2", "ovs_interfaceid": "ba67287f-e2da-402f-bab6-b8d2b9226aff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2003.684309] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b0:fa:b0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a69ed1dd-213a-4e30-992a-466735188bf6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ba67287f-e2da-402f-bab6-b8d2b9226aff', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2003.695077] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Creating folder: Project (a01553100a8f4078bb57a8149179d5d1). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2003.698446] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5c78234-db77-4980-bd96-6c630709018f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.716121] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Created folder: Project (a01553100a8f4078bb57a8149179d5d1) in parent group-v401959. [ 2003.716446] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Creating folder: Instances. Parent ref: group-v402215. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2003.716762] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d26b6ba4-51f6-45a7-93e9-72a6c13179fd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.731298] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Created folder: Instances in parent group-v402215. [ 2003.731523] env[63024]: DEBUG oslo.service.loopingcall [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2003.731803] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2003.734018] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-717f3ee9-ef97-4f9c-8549-ddc1d5f56480 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.757835] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2003.757835] env[63024]: value = "task-1951554" [ 2003.757835] env[63024]: _type = "Task" [ 2003.757835] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2003.764474] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf3ba62b-29bb-45b6-a2cf-9f20f2d344ef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.770664] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951554, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2003.775826] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e351cec-ab83-43d2-89e1-16427a2025cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.818259] env[63024]: DEBUG nova.compute.manager [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2003.823346] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3b76fd0-2fb6-4711-95d2-112ea760e8d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.834707] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a963298f-8464-48b8-ad54-758145420ea8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2003.854612] env[63024]: DEBUG nova.compute.provider_tree [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2004.268768] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951554, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.364617] env[63024]: DEBUG nova.network.neutron [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Successfully updated port: 0bf9200e-b614-45e7-9926-4e6db7134da3 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2004.386724] env[63024]: ERROR nova.scheduler.client.report [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [req-2a9fe5b9-19a4-4489-af4e-dbf2e91260fd] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2a9fe5b9-19a4-4489-af4e-dbf2e91260fd"}]} [ 2004.405535] env[63024]: DEBUG nova.scheduler.client.report [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 2004.408754] env[63024]: DEBUG nova.network.neutron [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Successfully created port: 041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2004.419018] env[63024]: DEBUG nova.compute.manager [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Received event network-changed-ba67287f-e2da-402f-bab6-b8d2b9226aff {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2004.419221] env[63024]: DEBUG nova.compute.manager [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Refreshing instance network info cache due to event network-changed-ba67287f-e2da-402f-bab6-b8d2b9226aff. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2004.419440] env[63024]: DEBUG oslo_concurrency.lockutils [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] Acquiring lock "refresh_cache-a694e49c-37c5-483f-b1d8-5426f6a52b73" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2004.419584] env[63024]: DEBUG oslo_concurrency.lockutils [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] Acquired lock "refresh_cache-a694e49c-37c5-483f-b1d8-5426f6a52b73" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2004.419745] env[63024]: DEBUG nova.network.neutron [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Refreshing network info cache for port ba67287f-e2da-402f-bab6-b8d2b9226aff {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2004.432768] env[63024]: DEBUG nova.scheduler.client.report [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory 
/opt/stack/nova/nova/scheduler/client/report.py:783}} [ 2004.433038] env[63024]: DEBUG nova.compute.provider_tree [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2004.447284] env[63024]: DEBUG nova.scheduler.client.report [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 2004.467033] env[63024]: DEBUG nova.scheduler.client.report [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 2004.776066] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951554, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2004.829526] env[63024]: DEBUG nova.compute.manager [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2004.862132] env[63024]: DEBUG nova.virt.hardware [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2004.862132] env[63024]: DEBUG nova.virt.hardware [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2004.862132] env[63024]: DEBUG nova.virt.hardware [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2004.862132] env[63024]: DEBUG nova.virt.hardware [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2004.862132] env[63024]: DEBUG nova.virt.hardware [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2004.862132] env[63024]: DEBUG nova.virt.hardware [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2004.862780] env[63024]: DEBUG nova.virt.hardware [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2004.863134] env[63024]: DEBUG nova.virt.hardware [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2004.863472] 
env[63024]: DEBUG nova.virt.hardware [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2004.863785] env[63024]: DEBUG nova.virt.hardware [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2004.864140] env[63024]: DEBUG nova.virt.hardware [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2004.865270] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dccb8221-7f2a-4882-8570-50c46650b30a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.873483] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "refresh_cache-50c72c53-ff72-42e6-afdc-14e0ac64f490" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2004.873801] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "refresh_cache-50c72c53-ff72-42e6-afdc-14e0ac64f490" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2004.874096] env[63024]: DEBUG nova.network.neutron [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2004.887683] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9daea9c5-96fa-4002-af00-d2e4544c21c3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.959672] env[63024]: DEBUG nova.network.neutron [-] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2004.974719] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00afefa-7451-4db6-b994-842ff5d66695 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2004.984724] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f137327-2f54-4193-bf17-a4fa3ea29950 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.018023] env[63024]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c03c169-f266-4e41-ba5c-82db32d36050 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.027386] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b74184-547e-40b0-8aa0-a14b6791350c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.044354] env[63024]: DEBUG nova.compute.manager [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2005.045266] env[63024]: DEBUG nova.compute.provider_tree [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2005.047063] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea4eaf4-550e-4f51-afe8-f9c849f32f97 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.272830] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951554, 'name': CreateVM_Task, 'duration_secs': 1.506565} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.273119] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2005.274109] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.274284] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.274630] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2005.274907] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25210fa1-91b1-43d3-af7b-dd4089e36f41 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.280601] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Waiting for the task: (returnval){ [ 2005.280601] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52db7785-0c09-2e8b-8227-0a8679f4fb1a" [ 2005.280601] env[63024]: _type = "Task" [ 2005.280601] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.289219] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52db7785-0c09-2e8b-8227-0a8679f4fb1a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.341429] env[63024]: DEBUG nova.network.neutron [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Updated VIF entry in instance network info cache for port ba67287f-e2da-402f-bab6-b8d2b9226aff. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2005.341860] env[63024]: DEBUG nova.network.neutron [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Updating instance_info_cache with network_info: [{"id": "ba67287f-e2da-402f-bab6-b8d2b9226aff", "address": "fa:16:3e:b0:fa:b0", "network": {"id": "fb9fab3c-39c4-43b4-ac95-91a662612a7e", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-1064954217-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a01553100a8f4078bb57a8149179d5d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a69ed1dd-213a-4e30-992a-466735188bf6", "external-id": "nsx-vlan-transportzone-102", "segmentation_id": 102, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapba67287f-e2", "ovs_interfaceid": "ba67287f-e2da-402f-bab6-b8d2b9226aff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2005.404403] env[63024]: DEBUG nova.network.neutron [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2005.465064] env[63024]: INFO nova.compute.manager [-] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Took 2.27 seconds to deallocate network for instance. 
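Editor's note: the 409 from Placement above (code "placement.concurrent_update") is the resource-provider generation check at work. Another request context updated provider 89dfa68a-133e-436f-a9f1-86051f9fb96b between this thread's read and its write, so the scheduler report client refreshes the inventory (picking up the new generation) and retries, which is what the "Refreshing inventories" and later "Updated inventory ... with generation 144" lines record. Below is a minimal sketch of that compare-and-swap loop against the Placement HTTP API; the endpoint URL, token and retry count are placeholders, and this is not Nova's actual report-client code.

    import requests

    PLACEMENT_URL = "http://placement.example/placement"   # placeholder endpoint
    HEADERS = {
        "X-Auth-Token": "...",                              # placeholder Keystone token
        "OpenStack-API-Version": "placement 1.26",
        "Accept": "application/json",
    }

    def set_inventory(provider_uuid, inventories, max_retries=3):
        """Write an inventory dict like the one logged above, retrying on 409."""
        url = "%s/resource_providers/%s/inventories" % (PLACEMENT_URL, provider_uuid)
        for _ in range(max_retries):
            # Read the current provider generation; a write carrying a stale
            # generation is rejected with 409 / placement.concurrent_update.
            current = requests.get(url, headers=HEADERS).json()
            payload = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventories,
            }
            resp = requests.put(url, json=payload, headers=HEADERS)
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()
            errors = resp.json().get("errors", [])
            if not any(e.get("code") == "placement.concurrent_update" for e in errors):
                resp.raise_for_status()
            # Another writer bumped the generation between our GET and PUT;
            # loop and re-read, as the report client's refresh-and-retry does.
        raise RuntimeError("inventory update for %s kept conflicting" % provider_uuid)

Placement uses this generation-based optimistic concurrency instead of server-side locking, so conflicting writers, as seen here with two contexts touching the same compute node, simply re-read and retry.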
[ 2005.559230] env[63024]: INFO nova.compute.manager [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] instance snapshotting [ 2005.562022] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-171eaac8-7c03-4f82-a066-a73e9008cca4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.583330] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338eec2a-8c2a-4e8a-8c89-d2febd01da1d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.586853] env[63024]: DEBUG nova.scheduler.client.report [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 144 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2005.587113] env[63024]: DEBUG nova.compute.provider_tree [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 144 to 145 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2005.587299] env[63024]: DEBUG nova.compute.provider_tree [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2005.655327] env[63024]: DEBUG nova.network.neutron [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Updating instance_info_cache with network_info: [{"id": "0bf9200e-b614-45e7-9926-4e6db7134da3", "address": "fa:16:3e:b6:3f:d1", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bf9200e-b6", "ovs_interfaceid": "0bf9200e-b614-45e7-9926-4e6db7134da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2005.807342] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52db7785-0c09-2e8b-8227-0a8679f4fb1a, 'name': SearchDatastore_Task, 'duration_secs': 0.012065} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2005.807691] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2005.807930] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2005.808191] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2005.808444] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2005.808546] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2005.809163] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-415e0d52-56d1-4cac-8c58-e81f67ec8edd 
{{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.818461] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2005.818683] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2005.819402] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6282e2c-c12a-45a1-b0cb-95c9264ca055 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2005.827031] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Waiting for the task: (returnval){ [ 2005.827031] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d29d18-bcc3-e911-9fd8-e23305b69e1c" [ 2005.827031] env[63024]: _type = "Task" [ 2005.827031] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2005.834593] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d29d18-bcc3-e911-9fd8-e23305b69e1c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2005.845250] env[63024]: DEBUG oslo_concurrency.lockutils [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] Releasing lock "refresh_cache-a694e49c-37c5-483f-b1d8-5426f6a52b73" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2005.845566] env[63024]: DEBUG nova.compute.manager [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Received event network-vif-plugged-0bf9200e-b614-45e7-9926-4e6db7134da3 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2005.845859] env[63024]: DEBUG oslo_concurrency.lockutils [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] Acquiring lock "50c72c53-ff72-42e6-afdc-14e0ac64f490-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2005.846134] env[63024]: DEBUG oslo_concurrency.lockutils [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] Lock "50c72c53-ff72-42e6-afdc-14e0ac64f490-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2005.846316] env[63024]: DEBUG oslo_concurrency.lockutils [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] Lock "50c72c53-ff72-42e6-afdc-14e0ac64f490-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2005.846490] env[63024]: DEBUG nova.compute.manager [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] No waiting events found dispatching network-vif-plugged-0bf9200e-b614-45e7-9926-4e6db7134da3 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2005.846663] env[63024]: WARNING nova.compute.manager [req-6c6c9b86-1fad-47e4-84bf-ca8af224058f req-ce144053-fdc0-4c5b-8d71-c2d4b8fb7f5c service nova] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Received unexpected event network-vif-plugged-0bf9200e-b614-45e7-9926-4e6db7134da3 for instance with vm_state building and task_state spawning. 
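Editor's note: the "Acquiring lock ... by ...", "Lock ... acquired ... waited Ns" and "'released' ... held Ns" lines throughout this trace (the "compute_resources" lock, the per-instance "...-events" lock just above) are emitted by oslo.concurrency's lockutils helpers, which serialize callers within the process and log wait/hold times. A minimal usage sketch follows; the lock names mirror the log, and the function bodies are placeholders rather than Nova's resource tracker or event-handling code.

    from oslo_concurrency import lockutils

    # Decorator form: every caller of update_usage() in this process serializes on
    # the "compute_resources" lock, and lockutils emits the Acquiring / acquired
    # (waited Ns) / released (held Ns) debug lines seen in the trace.
    @lockutils.synchronized("compute_resources")
    def update_usage(instance):
        pass  # placeholder: mutate resource-tracker state under the lock

    # Context-manager form of the same helper, here with a per-instance lock name
    # like the "<uuid>-events" lock above:
    def pop_instance_event(instance_uuid, event_name):
        with lockutils.lock("%s-events" % instance_uuid):
            pass  # placeholder: pop the waiting event, if any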
[ 2005.971957] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2006.095345] env[63024]: DEBUG oslo_concurrency.lockutils [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.313s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2006.098882] env[63024]: DEBUG oslo_concurrency.lockutils [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.661s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2006.098882] env[63024]: DEBUG nova.objects.instance [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lazy-loading 'resources' on Instance uuid 92d1f96e-bbe7-4654-9d3a-47ba40057157 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2006.105385] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2006.105687] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5313c5d0-0642-4d4d-92bb-ce27eb344093 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.115721] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2006.115721] env[63024]: value = "task-1951555" [ 2006.115721] env[63024]: _type = "Task" [ 2006.115721] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.120400] env[63024]: INFO nova.scheduler.client.report [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleted allocations for instance 0f371c69-c7ae-4649-b038-be82e8ca74e1 [ 2006.127047] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951555, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.158384] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "refresh_cache-50c72c53-ff72-42e6-afdc-14e0ac64f490" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.158801] env[63024]: DEBUG nova.compute.manager [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Instance network_info: |[{"id": "0bf9200e-b614-45e7-9926-4e6db7134da3", "address": "fa:16:3e:b6:3f:d1", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bf9200e-b6", "ovs_interfaceid": "0bf9200e-b614-45e7-9926-4e6db7134da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2006.159379] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:3f:d1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e66c4ebe-f808-4b34-bdb5-6c45edb1736f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0bf9200e-b614-45e7-9926-4e6db7134da3', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2006.167937] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Creating folder: Project (93098ad83ae144bf90a12c97ec863c06). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2006.168283] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8367a374-e077-46f1-9b06-03e3b746e417 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.181824] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Created folder: Project (93098ad83ae144bf90a12c97ec863c06) in parent group-v401959. 
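Editor's note: the folder and VM creation around this point follows the usual oslo.vmware call pattern. Synchronous vSphere methods (Folder.CreateFolder) return their result directly, while *_Task methods (CreateVM_Task, CreateSnapshot_Task, CloneVM_Task) return a Task reference that the session polls, producing the "Waiting for the task ... progress is N% ... completed successfully" lines. A minimal sketch, assuming an already-established oslo_vmware.api.VMwareAPISession and placeholder managed-object references, not the actual nova.virt.vmwareapi code:

    # `session` is assumed to be an established oslo_vmware.api.VMwareAPISession;
    # parent_ref, config_spec and respool_ref are placeholders standing in for the
    # refs and VM config spec Nova builds from the flavor and VIF info logged above.

    def create_instance_vm(session, parent_ref, config_spec, respool_ref):
        # Synchronous vSphere call: CreateFolder returns the new Folder ref directly
        # (the "Created folder: Instances in parent group-v402218" lines).
        instances_folder = session.invoke_api(
            session.vim, "CreateFolder", parent_ref, name="Instances")

        # Asynchronous vSphere call: *_Task methods return a Task ref; wait_for_task
        # polls it (the "Task: {'id': task-..., 'name': CreateVM_Task} progress is N%"
        # lines) until it completes, raising if the task ends in error.
        task = session.invoke_api(
            session.vim, "CreateVM_Task", instances_folder,
            config=config_spec, pool=respool_ref)
        task_info = session.wait_for_task(task)
        return task_info.result  # managed-object ref of the new VirtualMachine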
[ 2006.182052] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Creating folder: Instances. Parent ref: group-v402218. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2006.182367] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a40a032a-4efe-4ffa-b7d3-cb9b8f04b109 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.193355] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Created folder: Instances in parent group-v402218. [ 2006.193641] env[63024]: DEBUG oslo.service.loopingcall [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2006.193824] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2006.194064] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b31eb762-26ae-4c7d-9931-7038139106e5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.215465] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2006.215465] env[63024]: value = "task-1951558" [ 2006.215465] env[63024]: _type = "Task" [ 2006.215465] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.224032] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951558, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.293733] env[63024]: DEBUG nova.network.neutron [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Successfully updated port: 041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2006.339331] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d29d18-bcc3-e911-9fd8-e23305b69e1c, 'name': SearchDatastore_Task, 'duration_secs': 0.012411} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.341104] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fd0aef4-29a8-4362-9cc4-4587ddeb5213 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.349490] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Waiting for the task: (returnval){ [ 2006.349490] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5291963c-935d-5407-7dda-867aa8882ee3" [ 2006.349490] env[63024]: _type = "Task" [ 2006.349490] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.358170] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5291963c-935d-5407-7dda-867aa8882ee3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.504538] env[63024]: DEBUG nova.compute.manager [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Received event network-changed-0bf9200e-b614-45e7-9926-4e6db7134da3 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2006.505153] env[63024]: DEBUG nova.compute.manager [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Refreshing instance network info cache due to event network-changed-0bf9200e-b614-45e7-9926-4e6db7134da3. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2006.505651] env[63024]: DEBUG oslo_concurrency.lockutils [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] Acquiring lock "refresh_cache-50c72c53-ff72-42e6-afdc-14e0ac64f490" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2006.506057] env[63024]: DEBUG oslo_concurrency.lockutils [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] Acquired lock "refresh_cache-50c72c53-ff72-42e6-afdc-14e0ac64f490" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2006.506484] env[63024]: DEBUG nova.network.neutron [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Refreshing network info cache for port 0bf9200e-b614-45e7-9926-4e6db7134da3 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2006.601692] env[63024]: DEBUG nova.objects.instance [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lazy-loading 'numa_topology' on Instance uuid 92d1f96e-bbe7-4654-9d3a-47ba40057157 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2006.625955] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951555, 'name': CreateSnapshot_Task, 'duration_secs': 0.488485} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.626484] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2006.627372] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1043ea-902c-4030-a77e-e5ffaafb7e7d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.632706] env[63024]: DEBUG oslo_concurrency.lockutils [None req-519aa69f-b02f-485a-bf44-1bfb26c10be8 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "0f371c69-c7ae-4649-b038-be82e8ca74e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.772s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2006.726235] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951558, 'name': CreateVM_Task, 'duration_secs': 0.440471} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.726373] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2006.727129] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2006.727303] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2006.727624] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2006.727886] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cff0d145-908c-49c0-bc0e-0ce90afbad02 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.733487] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2006.733487] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52988837-7adf-409c-5eaf-597099224348" [ 2006.733487] env[63024]: _type = "Task" [ 2006.733487] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.743410] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52988837-7adf-409c-5eaf-597099224348, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2006.796891] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2006.797085] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2006.797243] env[63024]: DEBUG nova.network.neutron [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2006.858261] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5291963c-935d-5407-7dda-867aa8882ee3, 'name': SearchDatastore_Task, 'duration_secs': 0.010873} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2006.858590] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2006.858930] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] a694e49c-37c5-483f-b1d8-5426f6a52b73/a694e49c-37c5-483f-b1d8-5426f6a52b73.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2006.859139] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-deea2089-45f8-46ff-a8b2-2b1ad52cb1a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2006.867384] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Waiting for the task: (returnval){ [ 2006.867384] env[63024]: value = "task-1951559" [ 2006.867384] env[63024]: _type = "Task" [ 2006.867384] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2006.878069] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951559, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.104782] env[63024]: DEBUG nova.objects.base [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Object Instance<92d1f96e-bbe7-4654-9d3a-47ba40057157> lazy-loaded attributes: resources,numa_topology {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2007.148784] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2007.152398] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-282d511c-870d-4056-a8b2-b2abc1c3eef4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.163134] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2007.163134] env[63024]: value = "task-1951560" [ 2007.163134] env[63024]: _type = "Task" [ 2007.163134] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.175136] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951560, 'name': CloneVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.248634] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52988837-7adf-409c-5eaf-597099224348, 'name': SearchDatastore_Task, 'duration_secs': 0.010393} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.251367] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2007.251653] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2007.251895] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2007.252056] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2007.252275] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2007.252845] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ae7fb41-4b94-48c4-89bf-3a63f8306dc8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.269610] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2007.269834] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2007.270641] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acbbd785-8d9b-48a1-970b-1c9db17a750d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.277697] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2007.277697] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c46232-a38b-5a69-6abf-90497c2cc7d1" [ 2007.277697] env[63024]: _type = "Task" [ 2007.277697] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.290515] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c46232-a38b-5a69-6abf-90497c2cc7d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.384972] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951559, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.473131} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.387719] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] a694e49c-37c5-483f-b1d8-5426f6a52b73/a694e49c-37c5-483f-b1d8-5426f6a52b73.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2007.388324] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2007.389941] env[63024]: DEBUG nova.network.neutron [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2007.392677] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b789a77c-8626-49c9-bb24-224cefab7926 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.403034] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Waiting for the task: (returnval){ [ 2007.403034] env[63024]: value = "task-1951561" [ 2007.403034] env[63024]: _type = "Task" [ 2007.403034] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.416215] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951561, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.541628] env[63024]: DEBUG nova.network.neutron [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Updated VIF entry in instance network info cache for port 0bf9200e-b614-45e7-9926-4e6db7134da3. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2007.542022] env[63024]: DEBUG nova.network.neutron [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Updating instance_info_cache with network_info: [{"id": "0bf9200e-b614-45e7-9926-4e6db7134da3", "address": "fa:16:3e:b6:3f:d1", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0bf9200e-b6", "ovs_interfaceid": "0bf9200e-b614-45e7-9926-4e6db7134da3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2007.654726] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c54146f-f17c-4a00-91b7-9774bae1e8d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.664328] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-84457ff5-7988-4db4-8ab0-f98090a07990 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.675760] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951560, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.702838] env[63024]: DEBUG nova.network.neutron [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updating instance_info_cache with network_info: [{"id": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "address": "fa:16:3e:8b:4e:6d", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041c096f-ef", "ovs_interfaceid": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2007.708017] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03324b3a-8264-4a0c-8a9b-ec997005c286 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.713261] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203b98d1-c565-444f-a42b-993677f7dac5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.728373] env[63024]: DEBUG nova.compute.provider_tree [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2007.806917] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 
tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c46232-a38b-5a69-6abf-90497c2cc7d1, 'name': SearchDatastore_Task, 'duration_secs': 0.045021} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.807739] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-172e2819-cce5-4520-8e80-ed6dbd1616f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.815897] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2007.815897] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52242bbf-ae90-c555-105d-fb710efa389c" [ 2007.815897] env[63024]: _type = "Task" [ 2007.815897] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.824635] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52242bbf-ae90-c555-105d-fb710efa389c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2007.913747] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951561, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084477} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2007.914325] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2007.915167] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132aae0f-7c3d-4f16-bd8e-308bfdf9148d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.939067] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] a694e49c-37c5-483f-b1d8-5426f6a52b73/a694e49c-37c5-483f-b1d8-5426f6a52b73.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2007.939366] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c56e8493-350c-4c62-a1c2-3c22340f41bc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2007.961260] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Waiting for the task: (returnval){ [ 2007.961260] env[63024]: value = "task-1951562" [ 2007.961260] env[63024]: _type = "Task" [ 2007.961260] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2007.969672] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951562, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.045447] env[63024]: DEBUG oslo_concurrency.lockutils [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] Releasing lock "refresh_cache-50c72c53-ff72-42e6-afdc-14e0ac64f490" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.045447] env[63024]: DEBUG nova.compute.manager [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Received event network-vif-deleted-749aba46-5057-4a6a-8e7c-f7df42b7d129 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2008.045651] env[63024]: DEBUG nova.compute.manager [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Received event network-vif-plugged-041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2008.045846] env[63024]: DEBUG oslo_concurrency.lockutils [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] Acquiring lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.046074] env[63024]: DEBUG oslo_concurrency.lockutils [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] Lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.046244] env[63024]: DEBUG oslo_concurrency.lockutils [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] Lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.046411] env[63024]: DEBUG nova.compute.manager [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] No waiting events found dispatching network-vif-plugged-041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2008.046577] env[63024]: WARNING nova.compute.manager [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Received unexpected event network-vif-plugged-041c096f-ef1b-49ad-aadb-469b89fe4f25 for instance with vm_state building and task_state spawning. 
[ 2008.046741] env[63024]: DEBUG nova.compute.manager [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Received event network-changed-041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2008.046896] env[63024]: DEBUG nova.compute.manager [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Refreshing instance network info cache due to event network-changed-041c096f-ef1b-49ad-aadb-469b89fe4f25. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2008.047080] env[63024]: DEBUG oslo_concurrency.lockutils [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] Acquiring lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2008.178652] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951560, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.209114] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.209114] env[63024]: DEBUG nova.compute.manager [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Instance network_info: |[{"id": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "address": "fa:16:3e:8b:4e:6d", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041c096f-ef", "ovs_interfaceid": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2008.209114] env[63024]: DEBUG oslo_concurrency.lockutils [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] Acquired 
lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2008.209350] env[63024]: DEBUG nova.network.neutron [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Refreshing network info cache for port 041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2008.211100] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:4e:6d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '041c096f-ef1b-49ad-aadb-469b89fe4f25', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2008.218589] env[63024]: DEBUG oslo.service.loopingcall [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2008.219738] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2008.222508] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c21b108-ae6b-4b42-bba4-d8323ef85e84 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.263870] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2008.263870] env[63024]: value = "task-1951563" [ 2008.263870] env[63024]: _type = "Task" [ 2008.263870] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.279252] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951563, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.282361] env[63024]: DEBUG nova.scheduler.client.report [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 145 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2008.282361] env[63024]: DEBUG nova.compute.provider_tree [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 145 to 146 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2008.282361] env[63024]: DEBUG nova.compute.provider_tree [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2008.330983] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52242bbf-ae90-c555-105d-fb710efa389c, 'name': SearchDatastore_Task, 'duration_secs': 0.010804} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.331433] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2008.331919] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 50c72c53-ff72-42e6-afdc-14e0ac64f490/50c72c53-ff72-42e6-afdc-14e0ac64f490.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2008.332075] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d79dda10-dc69-4f5f-b250-908063fdc159 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.341346] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2008.341346] env[63024]: value = "task-1951564" [ 2008.341346] env[63024]: _type = "Task" [ 2008.341346] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.351362] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951564, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.471534] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951562, 'name': ReconfigVM_Task, 'duration_secs': 0.330478} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2008.471887] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Reconfigured VM instance instance-0000005f to attach disk [datastore1] a694e49c-37c5-483f-b1d8-5426f6a52b73/a694e49c-37c5-483f-b1d8-5426f6a52b73.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2008.472639] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a3ebc69c-04ba-4e3a-bb4e-5148c404e5b5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2008.483470] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Waiting for the task: (returnval){ [ 2008.483470] env[63024]: value = "task-1951565" [ 2008.483470] env[63024]: _type = "Task" [ 2008.483470] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2008.495110] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951565, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.680219] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951560, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.754166] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.754445] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.754680] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "73db94b8-cfa8-4457-bccb-d4b780edbd93-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2008.754978] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.755074] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.757293] env[63024]: INFO nova.compute.manager [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Terminating instance [ 2008.778921] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951563, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.786910] env[63024]: DEBUG oslo_concurrency.lockutils [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.689s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2008.793900] env[63024]: DEBUG oslo_concurrency.lockutils [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.403s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2008.793900] env[63024]: DEBUG nova.objects.instance [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lazy-loading 'resources' on Instance uuid 01b8072a-4483-4932-8294-7e5b48e6b203 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2008.852528] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951564, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2008.997815] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951565, 'name': Rename_Task, 'duration_secs': 0.226024} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.001863] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2009.002442] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-07610a01-3229-4aa7-ae73-c44433228acf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.011388] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Waiting for the task: (returnval){ [ 2009.011388] env[63024]: value = "task-1951566" [ 2009.011388] env[63024]: _type = "Task" [ 2009.011388] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.022268] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951566, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.167484] env[63024]: DEBUG nova.network.neutron [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updated VIF entry in instance network info cache for port 041c096f-ef1b-49ad-aadb-469b89fe4f25. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2009.168281] env[63024]: DEBUG nova.network.neutron [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updating instance_info_cache with network_info: [{"id": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "address": "fa:16:3e:8b:4e:6d", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041c096f-ef", "ovs_interfaceid": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2009.179815] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951560, 'name': CloneVM_Task} progress is 95%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.260789] env[63024]: DEBUG nova.compute.manager [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2009.261058] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2009.262068] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65efa2d3-60d0-4023-849e-7613b3068f72 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.271525] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2009.272190] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-30633597-c05b-4ae6-81b8-d5b5ceffbcf2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.277739] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951563, 'name': CreateVM_Task, 'duration_secs': 0.568884} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.277898] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2009.278606] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2009.278850] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2009.279258] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2009.280151] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc0da6f0-a1bf-4fd4-8cf1-4ae3553a51a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.282742] env[63024]: DEBUG oslo_vmware.api [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2009.282742] 
env[63024]: value = "task-1951567" [ 2009.282742] env[63024]: _type = "Task" [ 2009.282742] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.288254] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2009.288254] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521b2abd-ff0b-7228-7ee5-e6cf1fb68bab" [ 2009.288254] env[63024]: _type = "Task" [ 2009.288254] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.298883] env[63024]: DEBUG oslo_vmware.api [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951567, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.301653] env[63024]: DEBUG oslo_concurrency.lockutils [None req-441506c4-8676-47e7-a60f-146d0e696f24 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 46.941s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2009.302593] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 24.077s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2009.303326] env[63024]: INFO nova.compute.manager [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Unshelving [ 2009.307924] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]521b2abd-ff0b-7228-7ee5-e6cf1fb68bab, 'name': SearchDatastore_Task, 'duration_secs': 0.0103} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.308394] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.308628] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2009.308859] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2009.309018] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2009.310065] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2009.310065] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6586c629-869d-442b-afc3-54849434ba3f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.321331] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2009.321548] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2009.322338] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc823a8b-b354-4b05-adf1-a6021056cd99 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.329816] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2009.329816] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5259c44d-6281-c480-6e52-74a7d1e77a72" [ 2009.329816] env[63024]: _type = "Task" [ 2009.329816] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.342423] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5259c44d-6281-c480-6e52-74a7d1e77a72, 'name': SearchDatastore_Task, 'duration_secs': 0.010535} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.346740] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bc1b11c-34a5-4998-b265-61d7686c7d88 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.357733] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2009.357733] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c8ef66-f4b3-3ce5-96e6-efe0e796d0d9" [ 2009.357733] env[63024]: _type = "Task" [ 2009.357733] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.362179] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951564, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528576} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.369259] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 50c72c53-ff72-42e6-afdc-14e0ac64f490/50c72c53-ff72-42e6-afdc-14e0ac64f490.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2009.369498] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2009.371023] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1706993d-d6d4-4cce-85e9-5aa25aea0764 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.382805] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c8ef66-f4b3-3ce5-96e6-efe0e796d0d9, 'name': SearchDatastore_Task, 'duration_secs': 0.010363} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.386990] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.387323] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 34e4db8e-e0d9-4a27-9368-c5e711b51a29/34e4db8e-e0d9-4a27-9368-c5e711b51a29.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2009.388029] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2009.388029] env[63024]: value = "task-1951568" [ 2009.388029] env[63024]: _type = "Task" [ 2009.388029] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.388442] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4a750181-14b8-4f8a-9d37-d004048205be {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.400953] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951568, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.405207] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2009.405207] env[63024]: value = "task-1951569" [ 2009.405207] env[63024]: _type = "Task" [ 2009.405207] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.415177] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951569, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.525424] env[63024]: DEBUG oslo_vmware.api [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951566, 'name': PowerOnVM_Task, 'duration_secs': 0.486203} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.525747] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2009.525911] env[63024]: INFO nova.compute.manager [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Took 9.97 seconds to spawn the instance on the hypervisor. 
[ 2009.526147] env[63024]: DEBUG nova.compute.manager [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2009.526981] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703f0e0a-8e73-482a-b385-85585f35f4b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.575116] env[63024]: DEBUG nova.compute.manager [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Stashing vm_state: active {{(pid=63024) _prep_resize /opt/stack/nova/nova/compute/manager.py:5954}} [ 2009.675976] env[63024]: DEBUG oslo_concurrency.lockutils [req-463a474f-25e5-426e-a948-1bd19cfd4b95 req-00765a38-6c98-4559-b846-7d37254c4a65 service nova] Releasing lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2009.683212] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951560, 'name': CloneVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.737112] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9227121-1926-497e-a57b-78743ad3f94e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.750149] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60fde9e2-93ae-495b-8aa8-17f74ef011c8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.796036] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14dbc421-f3b4-48c3-9b5d-2fb7a2344a33 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.808661] env[63024]: DEBUG oslo_vmware.api [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951567, 'name': PowerOffVM_Task, 'duration_secs': 0.346552} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.811749] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2009.811970] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2009.814704] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c8752c7a-98c8-42bf-b496-cdbdc5fa42cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.817845] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e64bf44-e9ac-4e9e-a846-ca219cbc4be9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.838328] env[63024]: DEBUG nova.compute.provider_tree [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2009.902964] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951568, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.161998} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2009.903292] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2009.904164] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bcb81b0-055b-406c-b784-e312fd92dec5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.934851] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 50c72c53-ff72-42e6-afdc-14e0ac64f490/50c72c53-ff72-42e6-afdc-14e0ac64f490.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2009.938285] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fcad93b-5204-4608-b93d-6269420c8ddb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.954195] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951569, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2009.962334] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2009.962334] env[63024]: value = "task-1951571" [ 2009.962334] env[63024]: _type = "Task" [ 2009.962334] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2009.970782] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951571, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.047381] env[63024]: INFO nova.compute.manager [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Took 36.27 seconds to build instance. 
[ 2010.092622] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.183619] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951560, 'name': CloneVM_Task, 'duration_secs': 2.827704} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.184092] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Created linked-clone VM from snapshot [ 2010.185343] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2407472-658d-4a42-8cf7-a69b3e6bb2f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.188479] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2010.188684] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2010.188870] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Deleting the datastore file [datastore1] 73db94b8-cfa8-4457-bccb-d4b780edbd93 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2010.189469] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7ea60ef7-c285-427d-a571-d3c998144611 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.194532] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Uploading image 57a07259-e3ec-4f18-8f83-5147d4254d72 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2010.203243] env[63024]: DEBUG oslo_vmware.api [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2010.203243] env[63024]: value = "task-1951572" [ 2010.203243] env[63024]: _type = "Task" [ 2010.203243] env[63024]: } to 
complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.208498] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2010.208839] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-065004bc-62bd-4347-bb45-47ef3cdcdc80 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.213956] env[63024]: DEBUG oslo_vmware.api [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951572, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.220140] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2010.220140] env[63024]: value = "task-1951573" [ 2010.220140] env[63024]: _type = "Task" [ 2010.220140] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.228393] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951573, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.342454] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.343473] env[63024]: DEBUG nova.scheduler.client.report [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2010.418138] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951569, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558793} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.420031] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 34e4db8e-e0d9-4a27-9368-c5e711b51a29/34e4db8e-e0d9-4a27-9368-c5e711b51a29.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2010.420031] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2010.420031] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ff8ae41-0f84-4b46-861c-365ef577a0d4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.433419] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2010.433419] env[63024]: value = "task-1951574" [ 2010.433419] env[63024]: _type = "Task" [ 2010.433419] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.444792] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951574, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.474914] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951571, 'name': ReconfigVM_Task, 'duration_secs': 0.303327} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.477925] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 50c72c53-ff72-42e6-afdc-14e0ac64f490/50c72c53-ff72-42e6-afdc-14e0ac64f490.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2010.480749] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-16b3402b-6ff1-4379-b8b3-47d2908a99db {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.488392] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2010.488392] env[63024]: value = "task-1951575" [ 2010.488392] env[63024]: _type = "Task" [ 2010.488392] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2010.500410] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951575, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.549257] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2b5752a-ce85-4518-9af5-a85fc5dbc144 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Lock "a694e49c-37c5-483f-b1d8-5426f6a52b73" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.779s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.634574] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Acquiring lock "a694e49c-37c5-483f-b1d8-5426f6a52b73" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.635062] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Lock "a694e49c-37c5-483f-b1d8-5426f6a52b73" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.635318] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Acquiring lock "a694e49c-37c5-483f-b1d8-5426f6a52b73-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2010.635521] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Lock "a694e49c-37c5-483f-b1d8-5426f6a52b73-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.635713] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Lock "a694e49c-37c5-483f-b1d8-5426f6a52b73-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.638057] env[63024]: INFO nova.compute.manager [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Terminating instance [ 2010.713974] env[63024]: DEBUG oslo_vmware.api [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951572, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17914} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.714202] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2010.714365] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2010.714548] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2010.714746] env[63024]: INFO nova.compute.manager [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Took 1.45 seconds to destroy the instance on the hypervisor. [ 2010.714990] env[63024]: DEBUG oslo.service.loopingcall [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2010.715192] env[63024]: DEBUG nova.compute.manager [-] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2010.715287] env[63024]: DEBUG nova.network.neutron [-] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2010.730205] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951573, 'name': Destroy_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2010.849132] env[63024]: DEBUG oslo_concurrency.lockutils [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.057s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2010.851858] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.315s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2010.853400] env[63024]: INFO nova.compute.claims [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2010.876121] env[63024]: INFO nova.scheduler.client.report [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Deleted allocations for instance 01b8072a-4483-4932-8294-7e5b48e6b203 [ 2010.942048] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951574, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.22369} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2010.942434] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2010.943246] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f92fe29-de6f-4131-b5fa-e6f271429dbd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.969405] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] 34e4db8e-e0d9-4a27-9368-c5e711b51a29/34e4db8e-e0d9-4a27-9368-c5e711b51a29.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2010.969627] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0226be3-c780-4259-8bf3-3400610ef922 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.994329] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2010.994329] env[63024]: value = "task-1951576" [ 2010.994329] env[63024]: _type = "Task" [ 2010.994329] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.001358] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951575, 'name': Rename_Task, 'duration_secs': 0.164661} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.002761] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2011.002761] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dea0644e-9f0b-4d5e-b874-e7325f2b31de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.007252] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951576, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.012424] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2011.012424] env[63024]: value = "task-1951577" [ 2011.012424] env[63024]: _type = "Task" [ 2011.012424] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.020685] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951577, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.142328] env[63024]: DEBUG nova.compute.manager [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2011.142689] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2011.143658] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63de380-9533-4fe1-bcb6-a56d8084515a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.152576] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2011.152856] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22cfd00d-8cc9-40f9-a4da-8d5726d3a161 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.160232] env[63024]: DEBUG oslo_vmware.api [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Waiting for the task: (returnval){ [ 2011.160232] env[63024]: value = "task-1951578" [ 2011.160232] env[63024]: _type = "Task" [ 2011.160232] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.170951] env[63024]: DEBUG oslo_vmware.api [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951578, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.233309] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951573, 'name': Destroy_Task, 'duration_secs': 0.779567} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.233816] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Destroyed the VM [ 2011.234240] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2011.234663] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f2c22eec-77c2-4a5b-9639-e3e4140d5f9c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.245046] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2011.245046] env[63024]: value = "task-1951579" [ 2011.245046] env[63024]: _type = "Task" [ 2011.245046] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.254250] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951579, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.385477] env[63024]: DEBUG oslo_concurrency.lockutils [None req-218e5f03-ca19-4d8c-a95d-2f4b86840cf3 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "01b8072a-4483-4932-8294-7e5b48e6b203" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.257s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2011.514218] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951576, 'name': ReconfigVM_Task, 'duration_secs': 0.411825} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.514218] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Reconfigured VM instance instance-00000061 to attach disk [datastore1] 34e4db8e-e0d9-4a27-9368-c5e711b51a29/34e4db8e-e0d9-4a27-9368-c5e711b51a29.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2011.514218] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6320bb2-5232-404b-a6bb-b99658b47cc8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.529758] env[63024]: DEBUG oslo_vmware.api [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951577, 'name': PowerOnVM_Task, 'duration_secs': 0.474348} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.531735] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2011.532200] env[63024]: INFO nova.compute.manager [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Took 9.28 seconds to spawn the instance on the hypervisor. [ 2011.532367] env[63024]: DEBUG nova.compute.manager [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2011.533142] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2011.533142] env[63024]: value = "task-1951580" [ 2011.533142] env[63024]: _type = "Task" [ 2011.533142] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.533883] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7caf7c-59aa-487c-9c2b-4ace32f324b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.553995] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951580, 'name': Rename_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.673286] env[63024]: DEBUG oslo_vmware.api [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951578, 'name': PowerOffVM_Task, 'duration_secs': 0.196294} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.673633] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2011.673712] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2011.673961] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-206b56bd-0ac3-4ede-8e44-2636a32bba15 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.756386] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951579, 'name': RemoveSnapshot_Task} progress is 80%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.784175] env[63024]: DEBUG nova.compute.manager [req-d4142280-d44c-4ce5-8461-c33f8719bf51 req-6fea1df8-61e3-45dc-bdd3-46691465265e service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Received event network-vif-deleted-f78f097c-0df1-4f4f-8941-cf21c2b2ca4b {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2011.784175] env[63024]: INFO nova.compute.manager [req-d4142280-d44c-4ce5-8461-c33f8719bf51 req-6fea1df8-61e3-45dc-bdd3-46691465265e service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Neutron deleted interface f78f097c-0df1-4f4f-8941-cf21c2b2ca4b; detaching it from the instance and deleting it from the info cache [ 2011.784175] env[63024]: DEBUG nova.network.neutron [req-d4142280-d44c-4ce5-8461-c33f8719bf51 req-6fea1df8-61e3-45dc-bdd3-46691465265e service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.964068] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2011.964068] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2011.964068] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Deleting the datastore file [datastore1] a694e49c-37c5-483f-b1d8-5426f6a52b73 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2011.965031] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f306d4b-8f8e-4b86-9fac-8023a6139996 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.971474] env[63024]: DEBUG oslo_vmware.api [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Waiting for the task: (returnval){ [ 2011.971474] env[63024]: value = "task-1951582" [ 2011.971474] env[63024]: _type = "Task" [ 2011.971474] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.982760] env[63024]: DEBUG oslo_vmware.api [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951582, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.046273] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951580, 'name': Rename_Task, 'duration_secs': 0.21393} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.049204] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2012.049649] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f06b2e6f-dc62-4435-8f3b-6396ad44d5e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.061739] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2012.061739] env[63024]: value = "task-1951583" [ 2012.061739] env[63024]: _type = "Task" [ 2012.061739] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2012.067430] env[63024]: INFO nova.compute.manager [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Took 35.96 seconds to build instance. [ 2012.076189] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951583, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.101813] env[63024]: DEBUG nova.network.neutron [-] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2012.256688] env[63024]: DEBUG oslo_vmware.api [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951579, 'name': RemoveSnapshot_Task, 'duration_secs': 0.656495} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.259625] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2012.273715] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d24ae3ed-10fa-4ae7-bff3-9b058088b412 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.283014] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5edcc07-96e5-47bd-a97e-d6dd6cbd47d1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.286620] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6ff60c2-6c1d-481b-9142-a1954b66ff6b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.324243] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94d17f0-dae6-461e-9330-bf41fa09041f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.337023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f46f2d1-0538-4b79-9faf-d12009f62e56 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.349075] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885d6703-498c-466e-9d28-b4dc29ca26f6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2012.362994] env[63024]: DEBUG nova.compute.manager [req-d4142280-d44c-4ce5-8461-c33f8719bf51 req-6fea1df8-61e3-45dc-bdd3-46691465265e service nova] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Detach interface failed, port_id=f78f097c-0df1-4f4f-8941-cf21c2b2ca4b, reason: Instance 73db94b8-cfa8-4457-bccb-d4b780edbd93 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2012.374191] env[63024]: DEBUG nova.compute.provider_tree [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2012.484598] env[63024]: DEBUG oslo_vmware.api [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Task: {'id': task-1951582, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.28263} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2012.484848] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2012.485263] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2012.486054] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2012.486054] env[63024]: INFO nova.compute.manager [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Took 1.34 seconds to destroy the instance on the hypervisor. [ 2012.486054] env[63024]: DEBUG oslo.service.loopingcall [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2012.486422] env[63024]: DEBUG nova.compute.manager [-] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2012.486422] env[63024]: DEBUG nova.network.neutron [-] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2012.571855] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2bbdc043-11f3-40e0-8aed-1b837f30f157 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "50c72c53-ff72-42e6-afdc-14e0ac64f490" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 37.479s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.576728] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951583, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.603455] env[63024]: INFO nova.compute.manager [-] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Took 1.89 seconds to deallocate network for instance. 
[ 2012.765143] env[63024]: WARNING nova.compute.manager [None req-d80c0d46-f24b-4fb5-83b2-b55dcf890a2e tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Image not found during snapshot: nova.exception.ImageNotFound: Image 57a07259-e3ec-4f18-8f83-5147d4254d72 could not be found. [ 2012.878986] env[63024]: DEBUG nova.scheduler.client.report [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2012.909088] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "50c72c53-ff72-42e6-afdc-14e0ac64f490" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.909208] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "50c72c53-ff72-42e6-afdc-14e0ac64f490" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.910077] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "50c72c53-ff72-42e6-afdc-14e0ac64f490-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2012.910077] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "50c72c53-ff72-42e6-afdc-14e0ac64f490-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2012.910077] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "50c72c53-ff72-42e6-afdc-14e0ac64f490-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2012.912870] env[63024]: INFO nova.compute.manager [None req-3f8fc803-bb4f-4617-933f-7972cce55655 
tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Terminating instance [ 2013.076908] env[63024]: DEBUG oslo_vmware.api [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951583, 'name': PowerOnVM_Task, 'duration_secs': 0.834585} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.077195] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2013.077388] env[63024]: INFO nova.compute.manager [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Took 8.25 seconds to spawn the instance on the hypervisor. [ 2013.077617] env[63024]: DEBUG nova.compute.manager [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2013.078347] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd06133-6d3f-4e75-b9f7-8563aaa988fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.109638] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.383967] env[63024]: DEBUG nova.network.neutron [-] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2013.385893] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.534s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.386417] env[63024]: DEBUG nova.compute.manager [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2013.389856] env[63024]: DEBUG oslo_concurrency.lockutils [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.524s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2013.390107] env[63024]: DEBUG nova.objects.instance [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Lazy-loading 'resources' on Instance uuid 6c277ff8-ec25-4fd7-9dea-0efea9a0de29 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2013.416444] env[63024]: DEBUG nova.compute.manager [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2013.416683] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2013.417590] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8da8fcf-65f1-42d3-a46e-39dc56b24bf4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.427049] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2013.427292] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e883eb05-93b6-45d4-a729-3e63d3f9fafb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.434100] env[63024]: DEBUG oslo_vmware.api [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2013.434100] env[63024]: value = "task-1951585" [ 2013.434100] env[63024]: _type = "Task" [ 2013.434100] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2013.443893] env[63024]: DEBUG oslo_vmware.api [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951585, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2013.596906] env[63024]: INFO nova.compute.manager [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Took 36.64 seconds to build instance. [ 2013.806056] env[63024]: DEBUG nova.compute.manager [req-219b1de7-0ccc-4000-aae3-3bc6683b9dfe req-37a8896c-1556-490a-8ba9-78d2cfb3a2f4 service nova] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Received event network-vif-deleted-ba67287f-e2da-402f-bab6-b8d2b9226aff {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2013.890379] env[63024]: INFO nova.compute.manager [-] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Took 1.40 seconds to deallocate network for instance. [ 2013.895858] env[63024]: DEBUG nova.compute.utils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2013.899653] env[63024]: DEBUG nova.compute.manager [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2013.899840] env[63024]: DEBUG nova.network.neutron [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2013.948357] env[63024]: DEBUG oslo_vmware.api [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951585, 'name': PowerOffVM_Task, 'duration_secs': 0.211326} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2013.948512] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2013.948684] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2013.948943] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a65f2f6f-9b93-45ef-bb25-0d2087ba1a9f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2013.976683] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "da1f5cbc-47bf-4ee4-837a-b328de170489" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.979375] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "da1f5cbc-47bf-4ee4-837a-b328de170489" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2013.979375] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "da1f5cbc-47bf-4ee4-837a-b328de170489-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2013.979375] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "da1f5cbc-47bf-4ee4-837a-b328de170489-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2013.979375] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "da1f5cbc-47bf-4ee4-837a-b328de170489-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2013.980196] env[63024]: INFO nova.compute.manager [None 
req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Terminating instance [ 2013.988847] env[63024]: DEBUG nova.policy [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3fc112b4851e4dbeac3a69409e7bf98e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1886be852b01400aaf7a31c8fe5d4d7a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2014.053141] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2014.053399] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2014.053590] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleting the datastore file [datastore1] 50c72c53-ff72-42e6-afdc-14e0ac64f490 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2014.053855] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17ba5b59-cdc8-437a-b283-26e5d8008491 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.062472] env[63024]: DEBUG oslo_vmware.api [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2014.062472] env[63024]: value = "task-1951587" [ 2014.062472] env[63024]: _type = "Task" [ 2014.062472] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.077040] env[63024]: DEBUG oslo_vmware.api [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951587, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.099639] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d8f681ac-d149-4008-9bfd-688a70cab289 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.181s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2014.306126] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-538a59fe-90d1-42f8-a0e6-627a2fd0b8de {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.314566] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3662864d-ecdd-4c71-88cb-e7f5f137d0b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.346293] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50867cd2-30d2-47fd-b560-199284f6277c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.354736] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ed39829-5fce-4f91-b1d3-e7f7c7c35c17 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.369101] env[63024]: DEBUG nova.compute.provider_tree [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2014.403253] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2014.403896] env[63024]: DEBUG nova.compute.manager [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2014.492338] env[63024]: DEBUG nova.compute.manager [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2014.492660] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2014.493735] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e816ed1-45ff-4bb5-aab1-5b9d92a7c1c6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.502404] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2014.502985] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ef0a4c19-7a0d-40e6-992b-f57aa2e88d56 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2014.511705] env[63024]: DEBUG oslo_vmware.api [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2014.511705] env[63024]: value = "task-1951588" [ 2014.511705] env[63024]: _type = "Task" [ 2014.511705] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2014.521969] env[63024]: DEBUG oslo_vmware.api [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951588, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2014.574643] env[63024]: DEBUG oslo_vmware.api [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951587, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.124686} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2014.574979] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2014.575198] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2014.575386] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2014.575561] env[63024]: INFO nova.compute.manager [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2014.575808] env[63024]: DEBUG oslo.service.loopingcall [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2014.576015] env[63024]: DEBUG nova.compute.manager [-] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2014.576135] env[63024]: DEBUG nova.network.neutron [-] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2014.727128] env[63024]: DEBUG nova.network.neutron [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Successfully created port: 811b1be3-1c07-4661-b336-ef7e0b8b4b54 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2014.892502] env[63024]: ERROR nova.scheduler.client.report [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [req-bb95f14f-973a-48aa-92f8-62bed4eba14f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bb95f14f-973a-48aa-92f8-62bed4eba14f"}]} [ 2014.913632] env[63024]: DEBUG nova.scheduler.client.report [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 2014.936192] env[63024]: DEBUG nova.scheduler.client.report [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 2014.936423] env[63024]: DEBUG nova.compute.provider_tree [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2014.949313] env[63024]: DEBUG nova.scheduler.client.report [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 2014.975702] env[63024]: DEBUG nova.scheduler.client.report [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 2015.026460] env[63024]: DEBUG oslo_vmware.api [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951588, 'name': PowerOffVM_Task, 'duration_secs': 0.230118} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.029463] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2015.029463] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2015.029831] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b563ddb7-d0e6-4ef7-a63e-00a35518a887 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.115199] env[63024]: DEBUG nova.compute.manager [req-724d2735-b72b-4334-a2a6-aada141cf424 req-f23901d5-6c60-4815-8736-0d0a6d2ceb18 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Received event network-changed-041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2015.115693] env[63024]: DEBUG nova.compute.manager [req-724d2735-b72b-4334-a2a6-aada141cf424 req-f23901d5-6c60-4815-8736-0d0a6d2ceb18 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Refreshing instance network info cache due to event network-changed-041c096f-ef1b-49ad-aadb-469b89fe4f25. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2015.116040] env[63024]: DEBUG oslo_concurrency.lockutils [req-724d2735-b72b-4334-a2a6-aada141cf424 req-f23901d5-6c60-4815-8736-0d0a6d2ceb18 service nova] Acquiring lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2015.117334] env[63024]: DEBUG oslo_concurrency.lockutils [req-724d2735-b72b-4334-a2a6-aada141cf424 req-f23901d5-6c60-4815-8736-0d0a6d2ceb18 service nova] Acquired lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2015.117334] env[63024]: DEBUG nova.network.neutron [req-724d2735-b72b-4334-a2a6-aada141cf424 req-f23901d5-6c60-4815-8736-0d0a6d2ceb18 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Refreshing network info cache for port 041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2015.158517] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2015.158785] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Deleting contents of the VM 
from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2015.158996] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Deleting the datastore file [datastore1] da1f5cbc-47bf-4ee4-837a-b328de170489 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2015.159305] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-790a0e59-aa70-41e0-a943-a572e537d185 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.167676] env[63024]: DEBUG oslo_vmware.api [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2015.167676] env[63024]: value = "task-1951590" [ 2015.167676] env[63024]: _type = "Task" [ 2015.167676] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2015.175500] env[63024]: DEBUG oslo_vmware.api [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951590, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2015.419469] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e40c327-5632-4fe9-bfdf-7a63e3d4818b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.422360] env[63024]: DEBUG nova.compute.manager [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2015.431257] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be705be2-563d-4902-9667-6998d26d4df2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.474648] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60fd0bb-be31-4660-8fe6-a349572b5a14 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.485228] env[63024]: DEBUG nova.virt.hardware [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2015.485610] env[63024]: DEBUG nova.virt.hardware [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2015.485862] env[63024]: DEBUG nova.virt.hardware [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2015.486119] env[63024]: DEBUG nova.virt.hardware [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2015.486382] env[63024]: DEBUG nova.virt.hardware [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2015.486534] env[63024]: DEBUG nova.virt.hardware [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2015.486855] env[63024]: DEBUG nova.virt.hardware [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2015.487097] env[63024]: DEBUG nova.virt.hardware [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2015.487333] env[63024]: DEBUG nova.virt.hardware [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2015.487573] env[63024]: DEBUG nova.virt.hardware [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2015.487819] env[63024]: DEBUG nova.virt.hardware [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2015.489521] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db060d7-a113-41d2-a3e3-3191d95c9942 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.494992] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03ce4b92-5052-4d63-8110-0b9d7ae6916a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.519115] env[63024]: DEBUG nova.compute.provider_tree [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2015.522384] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f88223-1ecd-4705-90ad-396aaec6c277 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2015.623455] env[63024]: DEBUG nova.network.neutron [-] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2015.678674] env[63024]: DEBUG oslo_vmware.api [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 
tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951590, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174065} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2015.678953] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2015.679160] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2015.679341] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2015.679551] env[63024]: INFO nova.compute.manager [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Took 1.19 seconds to destroy the instance on the hypervisor. [ 2015.679748] env[63024]: DEBUG oslo.service.loopingcall [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2015.679943] env[63024]: DEBUG nova.compute.manager [-] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2015.680054] env[63024]: DEBUG nova.network.neutron [-] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2016.046813] env[63024]: ERROR nova.scheduler.client.report [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] [req-518ad457-4523-4d36-b04d-87e15a6552a6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-518ad457-4523-4d36-b04d-87e15a6552a6"}]} [ 2016.064606] env[63024]: DEBUG nova.compute.manager [req-b123358e-df30-4204-b66d-d9a211e75b6d req-8baf0e77-8bf0-4d8b-a063-e887b597ab13 service nova] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Received event network-vif-deleted-0bf9200e-b614-45e7-9926-4e6db7134da3 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2016.065990] env[63024]: DEBUG nova.scheduler.client.report [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 2016.085402] env[63024]: DEBUG nova.scheduler.client.report [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 2016.085402] env[63024]: DEBUG nova.compute.provider_tree [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2016.099014] env[63024]: DEBUG nova.scheduler.client.report [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 2016.116849] env[63024]: DEBUG nova.scheduler.client.report [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 
2016.126378] env[63024]: INFO nova.compute.manager [-] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Took 1.55 seconds to deallocate network for instance. [ 2016.147923] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "8826c266-659c-46ad-bb02-aefdffab8699" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.148189] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "8826c266-659c-46ad-bb02-aefdffab8699" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.222890] env[63024]: DEBUG nova.network.neutron [req-724d2735-b72b-4334-a2a6-aada141cf424 req-f23901d5-6c60-4815-8736-0d0a6d2ceb18 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updated VIF entry in instance network info cache for port 041c096f-ef1b-49ad-aadb-469b89fe4f25. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2016.223008] env[63024]: DEBUG nova.network.neutron [req-724d2735-b72b-4334-a2a6-aada141cf424 req-f23901d5-6c60-4815-8736-0d0a6d2ceb18 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updating instance_info_cache with network_info: [{"id": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "address": "fa:16:3e:8b:4e:6d", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041c096f-ef", "ovs_interfaceid": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2016.532552] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-233b0562-430c-4142-9d08-6107f2b7a583 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.541171] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f620ad8-1eeb-4d4c-a1e0-b3c0e0ff7182 {{(pid=63024) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.577589] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852af2b8-c490-41da-a647-ac5059e828fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.584903] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42471cd-2e90-4b18-b662-c40e3c363039 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2016.599887] env[63024]: DEBUG nova.compute.provider_tree [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2016.634035] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.653823] env[63024]: DEBUG nova.compute.manager [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2016.728101] env[63024]: DEBUG oslo_concurrency.lockutils [req-724d2735-b72b-4334-a2a6-aada141cf424 req-f23901d5-6c60-4815-8736-0d0a6d2ceb18 service nova] Releasing lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2016.771896] env[63024]: DEBUG nova.network.neutron [-] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2016.807152] env[63024]: DEBUG nova.network.neutron [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Successfully updated port: 811b1be3-1c07-4661-b336-ef7e0b8b4b54 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2016.994637] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.995007] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.995282] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2016.995496] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2016.995682] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2016.997993] env[63024]: INFO nova.compute.manager [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 
tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Terminating instance [ 2017.132334] env[63024]: DEBUG nova.scheduler.client.report [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 149 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2017.132616] env[63024]: DEBUG nova.compute.provider_tree [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 149 to 150 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2017.132802] env[63024]: DEBUG nova.compute.provider_tree [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2017.171975] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.273575] env[63024]: INFO nova.compute.manager [-] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Took 1.59 seconds to deallocate network for instance. 
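
Note on the lockutils entries in this stretch of the trace: they all follow the same three-step shape, "Acquiring lock X by Y", then "Lock X acquired by Y :: waited N.NNNs", then "Lock X \"released\" by Y :: held N.NNNs"; the multi-second waits on "compute_resources" a little further on show several terminate and claim paths queuing behind the resource tracker. Below is a minimal Python sketch of a timing wrapper that emits the same trio of lines; timed_lock, _locks and the caller string are hypothetical illustrations, not the oslo_concurrency.lockutils implementation.

import logging
import threading
import time
from contextlib import contextmanager

LOG = logging.getLogger(__name__)

# One process-wide lock per name, so concurrent callers serialize on it.
_locks: dict[str, threading.Lock] = {}


@contextmanager
def timed_lock(name: str, caller: str):
    """Acquire a named lock and log the acquire/waited/held trio of lines
    that appear throughout this trace."""
    lock = _locks.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
    start = time.monotonic()
    with lock:
        LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs',
                  name, caller, time.monotonic() - start)
        held_from = time.monotonic()
        try:
            yield
        finally:
            LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                      name, caller, time.monotonic() - held_from)


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    # Illustrative caller string only; mirrors the resource-tracker usage above.
    with timed_lock("compute_resources",
                    "nova.compute.resource_tracker.ResourceTracker.update_usage"):
        time.sleep(0.1)  # stand-in for the critical section

The real lockutils helper also supports external (inter-process, file-based) locks; the sketch only covers the in-process case and the log wording seen here.
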
[ 2017.309579] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "refresh_cache-85f31573-5535-4712-b736-747c43ed74b3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2017.309748] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "refresh_cache-85f31573-5535-4712-b736-747c43ed74b3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2017.309900] env[63024]: DEBUG nova.network.neutron [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2017.502227] env[63024]: DEBUG nova.compute.manager [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2017.502563] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2017.503614] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ca80482-431e-4076-b2c4-4e7ad244b451 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.512645] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2017.512903] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f2983812-ccca-402a-b139-1cdd86bbfd2b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2017.521758] env[63024]: DEBUG oslo_vmware.api [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 2017.521758] env[63024]: value = "task-1951591" [ 2017.521758] env[63024]: _type = "Task" [ 2017.521758] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2017.532122] env[63024]: DEBUG oslo_vmware.api [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951591, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2017.638429] env[63024]: DEBUG oslo_concurrency.lockutils [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 4.248s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.640865] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.728s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.641072] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2017.642877] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.319s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2017.643125] env[63024]: DEBUG nova.objects.instance [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lazy-loading 'resources' on Instance uuid c1fd4146-6dd3-49e9-a744-466e6168e158 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2017.664822] env[63024]: INFO nova.scheduler.client.report [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted allocations for instance 9e8e7b6e-1bb2-4e66-b734-2f56e31302af [ 2017.667454] env[63024]: INFO nova.scheduler.client.report [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Deleted allocations for instance 6c277ff8-ec25-4fd7-9dea-0efea9a0de29 [ 2017.781176] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2017.865973] env[63024]: DEBUG nova.network.neutron [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2018.031593] env[63024]: DEBUG oslo_vmware.api [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951591, 'name': PowerOffVM_Task, 'duration_secs': 0.220768} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.031853] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2018.032032] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2018.032282] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aca2bd4b-8d49-467e-b0ab-c01473f9e898 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.042029] env[63024]: DEBUG nova.network.neutron [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Updating instance_info_cache with network_info: [{"id": "811b1be3-1c07-4661-b336-ef7e0b8b4b54", "address": "fa:16:3e:06:0f:ac", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap811b1be3-1c", "ovs_interfaceid": "811b1be3-1c07-4661-b336-ef7e0b8b4b54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2018.172596] env[63024]: DEBUG nova.compute.manager [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Received event network-vif-deleted-1f29e0ef-5e8f-4e17-a724-f9270de55090 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2018.172832] env[63024]: DEBUG nova.compute.manager [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 
req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Received event network-vif-plugged-811b1be3-1c07-4661-b336-ef7e0b8b4b54 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2018.173099] env[63024]: DEBUG oslo_concurrency.lockutils [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] Acquiring lock "85f31573-5535-4712-b736-747c43ed74b3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2018.173399] env[63024]: DEBUG oslo_concurrency.lockutils [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] Lock "85f31573-5535-4712-b736-747c43ed74b3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2018.173585] env[63024]: DEBUG oslo_concurrency.lockutils [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] Lock "85f31573-5535-4712-b736-747c43ed74b3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.173761] env[63024]: DEBUG nova.compute.manager [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] [instance: 85f31573-5535-4712-b736-747c43ed74b3] No waiting events found dispatching network-vif-plugged-811b1be3-1c07-4661-b336-ef7e0b8b4b54 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2018.173923] env[63024]: WARNING nova.compute.manager [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Received unexpected event network-vif-plugged-811b1be3-1c07-4661-b336-ef7e0b8b4b54 for instance with vm_state building and task_state spawning. [ 2018.174094] env[63024]: DEBUG nova.compute.manager [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Received event network-changed-811b1be3-1c07-4661-b336-ef7e0b8b4b54 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2018.174255] env[63024]: DEBUG nova.compute.manager [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Refreshing instance network info cache due to event network-changed-811b1be3-1c07-4661-b336-ef7e0b8b4b54. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2018.174425] env[63024]: DEBUG oslo_concurrency.lockutils [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] Acquiring lock "refresh_cache-85f31573-5535-4712-b736-747c43ed74b3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2018.180910] env[63024]: DEBUG oslo_concurrency.lockutils [None req-915b2514-2f03-45da-b7b2-90f30582b06f tempest-ServerDiagnosticsNegativeTest-1413299664 tempest-ServerDiagnosticsNegativeTest-1413299664-project-member] Lock "6c277ff8-ec25-4fd7-9dea-0efea9a0de29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.185s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.182371] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0d9e2818-8d2a-418a-a421-57c6f02ed7ce tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "9e8e7b6e-1bb2-4e66-b734-2f56e31302af" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.215s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2018.199206] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2018.199468] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2018.199895] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Deleting the datastore file [datastore1] 31a693b6-293a-4f01-9baf-a9e7e8d453d4 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2018.200564] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d602e484-88f2-469f-b313-c46d1c01a337 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.212527] env[63024]: DEBUG oslo_vmware.api [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 2018.212527] env[63024]: value = "task-1951593" [ 2018.212527] env[63024]: _type = "Task" [ 2018.212527] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.222492] env[63024]: DEBUG oslo_vmware.api [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951593, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.513341] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20e9e2b-4a4f-4f5a-98c7-17551bc19e1d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.522352] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd50ce0a-0ff4-4444-8702-ce587d25e323 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.556715] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "refresh_cache-85f31573-5535-4712-b736-747c43ed74b3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2018.556715] env[63024]: DEBUG nova.compute.manager [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Instance network_info: |[{"id": "811b1be3-1c07-4661-b336-ef7e0b8b4b54", "address": "fa:16:3e:06:0f:ac", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap811b1be3-1c", "ovs_interfaceid": "811b1be3-1c07-4661-b336-ef7e0b8b4b54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2018.557208] env[63024]: DEBUG oslo_concurrency.lockutils [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] Acquired lock "refresh_cache-85f31573-5535-4712-b736-747c43ed74b3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2018.557401] env[63024]: DEBUG nova.network.neutron [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Refreshing network info cache for port 811b1be3-1c07-4661-b336-ef7e0b8b4b54 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2018.558701] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:06:0f:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'abe48956-848a-4e1f-b1f1-a27baa5390b9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '811b1be3-1c07-4661-b336-ef7e0b8b4b54', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2018.566606] env[63024]: DEBUG oslo.service.loopingcall [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2018.567300] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2fa9816-2dd9-4004-a42e-d5f8d6e00f93 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.572410] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2018.573030] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9aed1df5-44d9-4f90-8f3b-96fc064fbce5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.593565] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d2242d-ddce-4f04-8215-0c0f37a5f164 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2018.599461] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2018.599461] env[63024]: value = "task-1951594" [ 2018.599461] env[63024]: _type = "Task" [ 2018.599461] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2018.611964] env[63024]: DEBUG nova.compute.provider_tree [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2018.616214] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951594, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2018.726643] env[63024]: DEBUG oslo_vmware.api [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951593, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.301238} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2018.726892] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2018.727091] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2018.727274] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2018.727447] env[63024]: INFO nova.compute.manager [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Took 1.23 seconds to destroy the instance on the hypervisor. [ 2018.727868] env[63024]: DEBUG oslo.service.loopingcall [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2018.727868] env[63024]: DEBUG nova.compute.manager [-] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2018.727991] env[63024]: DEBUG nova.network.neutron [-] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2018.954894] env[63024]: DEBUG nova.network.neutron [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Updated VIF entry in instance network info cache for port 811b1be3-1c07-4661-b336-ef7e0b8b4b54. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2018.954894] env[63024]: DEBUG nova.network.neutron [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Updating instance_info_cache with network_info: [{"id": "811b1be3-1c07-4661-b336-ef7e0b8b4b54", "address": "fa:16:3e:06:0f:ac", "network": {"id": "67ab6461-8fa5-4dfd-9267-561a710ae91b", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-1814728209-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1886be852b01400aaf7a31c8fe5d4d7a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "abe48956-848a-4e1f-b1f1-a27baa5390b9", "external-id": "nsx-vlan-transportzone-238", "segmentation_id": 238, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap811b1be3-1c", "ovs_interfaceid": "811b1be3-1c07-4661-b336-ef7e0b8b4b54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2019.111314] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951594, 'name': CreateVM_Task, 'duration_secs': 0.485134} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.121357] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2019.121357] env[63024]: DEBUG nova.scheduler.client.report [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2019.127091] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2019.127447] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" 
{{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.127858] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2019.129556] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c426d3b-4e53-45b0-8c91-2dfe72c72da9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.136186] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 2019.136186] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526befb2-6e8c-5e30-2371-1b362489f6f8" [ 2019.136186] env[63024]: _type = "Task" [ 2019.136186] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.146397] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526befb2-6e8c-5e30-2371-1b362489f6f8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2019.457545] env[63024]: DEBUG oslo_concurrency.lockutils [req-e4d31d3d-d85a-4cad-87a6-96b80a2f5a91 req-18ec39ea-9e90-4838-a9b7-f0c7f6a279ee service nova] Releasing lock "refresh_cache-85f31573-5535-4712-b736-747c43ed74b3" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.631139] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.988s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2019.633569] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.068s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2019.633811] env[63024]: DEBUG nova.objects.instance [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Lazy-loading 'resources' on Instance uuid 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2019.636067] env[63024]: DEBUG nova.network.neutron [-] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2019.649513] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526befb2-6e8c-5e30-2371-1b362489f6f8, 'name': SearchDatastore_Task, 'duration_secs': 0.011311} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2019.649911] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2019.650219] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2019.650639] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2019.650856] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2019.651152] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2019.651805] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a3d0235-c267-4901-9c6f-e9d54060cc8b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.656301] env[63024]: INFO nova.scheduler.client.report [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Deleted allocations for instance c1fd4146-6dd3-49e9-a744-466e6168e158 [ 2019.662774] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2019.662958] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 
tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2019.663721] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e8557e2-677d-4367-95c0-1def19157278 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2019.670117] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 2019.670117] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523ab6cb-96b4-32e4-ca94-b13b8a5def48" [ 2019.670117] env[63024]: _type = "Task" [ 2019.670117] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2019.678962] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523ab6cb-96b4-32e4-ca94-b13b8a5def48, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.143629] env[63024]: INFO nova.compute.manager [-] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Took 1.42 seconds to deallocate network for instance. [ 2020.167385] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b7933569-dd5c-4390-b5ea-b74b99b7578a tempest-ServersAdminTestJSON-1761531644 tempest-ServersAdminTestJSON-1761531644-project-member] Lock "c1fd4146-6dd3-49e9-a744-466e6168e158" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.643s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2020.180975] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523ab6cb-96b4-32e4-ca94-b13b8a5def48, 'name': SearchDatastore_Task, 'duration_secs': 0.012048} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.185574] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a010b620-0fc0-4132-9913-5d17ef1501db {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.192027] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 2020.192027] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522bedd8-a71c-a1ff-0bec-dfdd84874408" [ 2020.192027] env[63024]: _type = "Task" [ 2020.192027] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.200932] env[63024]: DEBUG nova.compute.manager [req-a85124b4-88a5-4e68-bcf1-f45f0a77d135 req-b7fa520e-0c59-4d27-b6ad-50aed4b8fa86 service nova] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Received event network-vif-deleted-8530bff5-1223-4b93-9b55-536f7665048a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2020.205319] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522bedd8-a71c-a1ff-0bec-dfdd84874408, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2020.488960] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cf3a5ac-9e47-4433-9769-5e8dca53d5c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.498091] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d227f3-6b21-460e-a625-d436793d7693 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.534492] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c0ddc95-ad89-4411-8e05-16f4573d7129 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.543455] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589178d7-fd56-43a9-bd09-777ca89596ec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.557852] env[63024]: DEBUG nova.compute.provider_tree [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2020.651578] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2020.708524] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522bedd8-a71c-a1ff-0bec-dfdd84874408, 'name': SearchDatastore_Task, 'duration_secs': 0.020918} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2020.708524] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2020.708524] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 85f31573-5535-4712-b736-747c43ed74b3/85f31573-5535-4712-b736-747c43ed74b3.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2020.708524] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6ebed7c0-829f-4c56-a7d2-158a5ad7c45a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2020.717136] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 2020.717136] env[63024]: value = "task-1951595" [ 2020.717136] env[63024]: _type = "Task" [ 2020.717136] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2020.725399] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951595, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.062147] env[63024]: DEBUG nova.scheduler.client.report [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2021.227791] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951595, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2021.572020] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.935s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2021.574317] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.578s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2021.574654] env[63024]: DEBUG nova.objects.instance [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lazy-loading 'resources' on Instance uuid e9784dce-9a3f-4969-b48c-9c5b17959d88 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2021.599759] env[63024]: INFO nova.scheduler.client.report [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Deleted allocations for instance 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d [ 2021.728740] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951595, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607267} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2021.729285] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 85f31573-5535-4712-b736-747c43ed74b3/85f31573-5535-4712-b736-747c43ed74b3.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2021.729852] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2021.729852] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8ec46ef-4c03-4d52-9b69-4aceca9b5fdf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2021.738107] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 2021.738107] env[63024]: value = "task-1951596" [ 2021.738107] env[63024]: _type = "Task" [ 2021.738107] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2021.747927] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951596, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.111744] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7ae17958-bf6f-4ed2-ac39-e07f251c3b72 tempest-ServerAddressesTestJSON-198470985 tempest-ServerAddressesTestJSON-198470985-project-member] Lock "51bdfe4a-2439-4ad5-97f3-f60c70c87b9d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.777s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2022.249671] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951596, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07491} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.250688] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2022.250744] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c60854ea-0715-4b4f-84c0-02ca44b280f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.279137] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 85f31573-5535-4712-b736-747c43ed74b3/85f31573-5535-4712-b736-747c43ed74b3.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2022.282057] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-984e51cb-70b9-441c-b928-08233f76268f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.307586] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 2022.307586] env[63024]: value = "task-1951597" [ 2022.307586] env[63024]: _type = "Task" [ 2022.307586] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.320032] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951597, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2022.479695] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571572e5-fc1b-4d79-8018-1919d760b873 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.488959] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e2d8610-910a-44bc-b58c-d4a539605faf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.530187] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0698e977-f8a9-4486-b597-5d93265439f4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.539814] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef9a1e5-1d3c-461c-b3f7-5f160366bed8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.554408] env[63024]: DEBUG nova.compute.provider_tree [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2022.818553] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951597, 'name': ReconfigVM_Task, 'duration_secs': 0.311152} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2022.818870] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 85f31573-5535-4712-b736-747c43ed74b3/85f31573-5535-4712-b736-747c43ed74b3.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2022.819489] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f5f3cdcd-e6d9-4bab-a9c7-fd215b459bfe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2022.826613] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 2022.826613] env[63024]: value = "task-1951598" [ 2022.826613] env[63024]: _type = "Task" [ 2022.826613] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2022.834894] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951598, 'name': Rename_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.057262] env[63024]: DEBUG nova.scheduler.client.report [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2023.337134] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951598, 'name': Rename_Task, 'duration_secs': 0.15067} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.337511] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2023.337831] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6299a342-960e-4ad8-8dba-ee80c100c097 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2023.346406] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 2023.346406] env[63024]: value = "task-1951599" [ 2023.346406] env[63024]: _type = "Task" [ 2023.346406] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2023.356021] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951599, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2023.565068] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.989s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2023.565473] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.486s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2023.566988] env[63024]: INFO nova.compute.claims [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2023.598510] env[63024]: INFO nova.scheduler.client.report [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted allocations for instance e9784dce-9a3f-4969-b48c-9c5b17959d88 [ 2023.858699] env[63024]: DEBUG oslo_vmware.api [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951599, 'name': PowerOnVM_Task, 'duration_secs': 0.448076} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2023.859098] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2023.859615] env[63024]: INFO nova.compute.manager [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Took 8.44 seconds to spawn the instance on the hypervisor. 
[ 2023.860102] env[63024]: DEBUG nova.compute.manager [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2023.861090] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251ac1a6-3edb-4b0d-bced-946a741dc77d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.112148] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f201dcc2-4634-49fd-8d11-54d315d7a13d tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "e9784dce-9a3f-4969-b48c-9c5b17959d88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.465s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.385323] env[63024]: INFO nova.compute.manager [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Took 39.87 seconds to build instance. [ 2024.886973] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d11b0213-73a7-43ff-b2a6-9a691889baf9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.889739] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0ecde367-e099-4fec-8f3d-614ff44d9684 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "85f31573-5535-4712-b736-747c43ed74b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.382s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.895672] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a76ad18f-264b-4f25-b0d7-2442c33542c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.927509] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14f6b54-757e-4ae4-999b-4aad009b1410 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.935991] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2895f35f-c134-460e-a68c-442975580de0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2024.950545] env[63024]: DEBUG nova.compute.provider_tree [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2024.969625] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock 
"9e32eb32-6eff-4875-b4a3-adfab4647023" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.969859] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "9e32eb32-6eff-4875-b4a3-adfab4647023" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.970078] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "9e32eb32-6eff-4875-b4a3-adfab4647023-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2024.970279] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "9e32eb32-6eff-4875-b4a3-adfab4647023-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2024.970487] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "9e32eb32-6eff-4875-b4a3-adfab4647023-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2024.972331] env[63024]: INFO nova.compute.manager [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Terminating instance [ 2025.287056] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquiring lock "c5541241-84e2-4216-b6f9-4c716f29d759" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2025.287056] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lock "c5541241-84e2-4216-b6f9-4c716f29d759" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.453952] env[63024]: DEBUG nova.scheduler.client.report [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 
48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2025.475891] env[63024]: DEBUG nova.compute.manager [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2025.475891] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2025.476801] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f289783d-a2a2-43c5-9a34-b93ed15d2308 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.486748] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2025.487080] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e9ee705-1b6a-4c96-91ba-6611d3ee3b78 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2025.494927] env[63024]: DEBUG oslo_vmware.api [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2025.494927] env[63024]: value = "task-1951600" [ 2025.494927] env[63024]: _type = "Task" [ 2025.494927] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2025.504651] env[63024]: DEBUG oslo_vmware.api [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951600, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2025.790070] env[63024]: DEBUG nova.compute.manager [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2025.959568] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2025.960113] env[63024]: DEBUG nova.compute.manager [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2025.963442] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.777s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2025.963555] env[63024]: DEBUG nova.objects.instance [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lazy-loading 'resources' on Instance uuid c71abfaa-dc65-4d1b-8a34-dff9dd682fe7 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2026.007684] env[63024]: DEBUG oslo_vmware.api [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951600, 'name': PowerOffVM_Task, 'duration_secs': 0.397142} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.007983] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2026.008174] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2026.008429] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32eabdee-fbfa-4780-8ce9-24924ebeda61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.107973] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2026.108216] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2026.108406] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleting the datastore file [datastore1] 9e32eb32-6eff-4875-b4a3-adfab4647023 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2026.108737] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa610ff5-4268-498d-911e-7167e3d12a0d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.116996] env[63024]: DEBUG oslo_vmware.api [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2026.116996] env[63024]: value = "task-1951602" [ 2026.116996] env[63024]: _type = "Task" [ 2026.116996] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.125229] env[63024]: DEBUG oslo_vmware.api [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951602, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2026.314384] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.430449] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "85f31573-5535-4712-b736-747c43ed74b3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.430803] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "85f31573-5535-4712-b736-747c43ed74b3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.431152] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "85f31573-5535-4712-b736-747c43ed74b3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2026.431349] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "85f31573-5535-4712-b736-747c43ed74b3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2026.431563] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "85f31573-5535-4712-b736-747c43ed74b3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2026.434992] env[63024]: INFO nova.compute.manager [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Terminating instance [ 2026.466695] env[63024]: DEBUG nova.compute.utils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2026.471100] env[63024]: DEBUG nova.compute.manager [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 
tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2026.471399] env[63024]: DEBUG nova.network.neutron [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2026.530313] env[63024]: DEBUG nova.policy [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f215c99539cd43039ffdb0c6cf70beaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d0715f0ccbd49ec8af8e3049d970994', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2026.630389] env[63024]: DEBUG oslo_vmware.api [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951602, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.271871} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2026.630739] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2026.630995] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2026.631150] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2026.631323] env[63024]: INFO nova.compute.manager [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2026.631570] env[63024]: DEBUG oslo.service.loopingcall [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2026.631774] env[63024]: DEBUG nova.compute.manager [-] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2026.631851] env[63024]: DEBUG nova.network.neutron [-] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2026.778185] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f674c50-b035-4521-990c-487b3adf4721 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.786584] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59868853-4094-48ea-8339-5ba39bebdf6b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.816220] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5d09e0-48a7-422b-b91f-a929c1f5e27d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.824092] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c5328b-ddb2-4ec0-9295-0428dbcfe261 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.838383] env[63024]: DEBUG nova.compute.provider_tree [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2026.942018] env[63024]: DEBUG nova.compute.manager [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2026.942018] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2026.942596] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f3f21e-f1e0-47b6-90a0-3867336c59aa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.955310] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2026.955310] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c3e4028-ca61-4653-9a97-3aba410518fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2026.962442] env[63024]: DEBUG oslo_vmware.api [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 2026.962442] env[63024]: value = "task-1951603" [ 2026.962442] env[63024]: _type = "Task" [ 2026.962442] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2026.973955] env[63024]: DEBUG nova.compute.manager [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2026.976768] env[63024]: DEBUG oslo_vmware.api [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951603, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.121241] env[63024]: DEBUG nova.compute.manager [req-360a4ffe-1fb4-442c-a7e1-b6c76812d0fa req-c7afa9fb-b6f2-44aa-a461-61a49e1ef8b5 service nova] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Received event network-vif-deleted-a75c5293-2308-41d5-9464-4013af532f66 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2027.121241] env[63024]: INFO nova.compute.manager [req-360a4ffe-1fb4-442c-a7e1-b6c76812d0fa req-c7afa9fb-b6f2-44aa-a461-61a49e1ef8b5 service nova] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Neutron deleted interface a75c5293-2308-41d5-9464-4013af532f66; detaching it from the instance and deleting it from the info cache [ 2027.121241] env[63024]: DEBUG nova.network.neutron [req-360a4ffe-1fb4-442c-a7e1-b6c76812d0fa req-c7afa9fb-b6f2-44aa-a461-61a49e1ef8b5 service nova] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2027.142112] env[63024]: DEBUG nova.network.neutron [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Successfully created port: 7eeea323-6f39-4e16-8603-b463434191f8 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2027.341141] env[63024]: DEBUG nova.scheduler.client.report [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2027.473769] env[63024]: DEBUG oslo_vmware.api [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951603, 'name': PowerOffVM_Task, 'duration_secs': 0.271362} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2027.473915] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2027.474236] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2027.474599] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2259a281-a9ac-4fac-a45c-4cb162efcac0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.559457] env[63024]: DEBUG nova.network.neutron [-] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2027.579102] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2027.579457] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2027.579718] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleting the datastore file [datastore1] 85f31573-5535-4712-b736-747c43ed74b3 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2027.580552] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f33b1060-bb21-4ccb-9588-e2a3c51bfc02 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.589964] env[63024]: DEBUG oslo_vmware.api [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for the task: (returnval){ [ 2027.589964] env[63024]: value = "task-1951605" [ 2027.589964] env[63024]: _type = "Task" [ 2027.589964] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2027.599142] env[63024]: DEBUG oslo_vmware.api [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951605, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2027.624516] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f8027297-a482-46e3-ab17-924390df1b5c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.634491] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f070216e-f003-4551-b91d-2da55ad6d366 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2027.664613] env[63024]: DEBUG nova.compute.manager [req-360a4ffe-1fb4-442c-a7e1-b6c76812d0fa req-c7afa9fb-b6f2-44aa-a461-61a49e1ef8b5 service nova] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Detach interface failed, port_id=a75c5293-2308-41d5-9464-4013af532f66, reason: Instance 9e32eb32-6eff-4875-b4a3-adfab4647023 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2027.849629] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.886s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2027.852343] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 26.605s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2027.852708] env[63024]: DEBUG nova.objects.instance [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lazy-loading 'resources' on Instance uuid 6d21976b-f519-4c87-a0d2-0a406060608d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2027.879538] env[63024]: INFO nova.scheduler.client.report [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Deleted allocations for instance c71abfaa-dc65-4d1b-8a34-dff9dd682fe7 [ 2027.984057] env[63024]: DEBUG nova.compute.manager [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2028.012406] env[63024]: DEBUG nova.virt.hardware [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2028.012717] env[63024]: DEBUG nova.virt.hardware [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2028.012884] env[63024]: DEBUG nova.virt.hardware [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2028.013082] env[63024]: DEBUG nova.virt.hardware [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2028.013233] env[63024]: DEBUG nova.virt.hardware [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2028.013378] env[63024]: DEBUG nova.virt.hardware [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2028.013584] env[63024]: DEBUG nova.virt.hardware [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2028.013742] env[63024]: DEBUG nova.virt.hardware [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2028.013906] 
env[63024]: DEBUG nova.virt.hardware [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2028.014081] env[63024]: DEBUG nova.virt.hardware [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2028.014258] env[63024]: DEBUG nova.virt.hardware [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2028.015150] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236c456c-3f87-4641-a8c3-edd736fa0eaa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.023699] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b91fd49-698c-4946-86ea-0bdaf5b3ee49 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.061243] env[63024]: INFO nova.compute.manager [-] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Took 1.43 seconds to deallocate network for instance. [ 2028.101311] env[63024]: DEBUG oslo_vmware.api [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Task: {'id': task-1951605, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177495} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2028.101604] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2028.101807] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2028.101988] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2028.102177] env[63024]: INFO nova.compute.manager [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 2028.102447] env[63024]: DEBUG oslo.service.loopingcall [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2028.102635] env[63024]: DEBUG nova.compute.manager [-] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2028.102727] env[63024]: DEBUG nova.network.neutron [-] [instance: 85f31573-5535-4712-b736-747c43ed74b3] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2028.388583] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4e8b7539-2399-427e-b262-bcb4679c86c0 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "c71abfaa-dc65-4d1b-8a34-dff9dd682fe7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.284s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2028.567157] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2028.796594] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f46f57-f807-4f94-b01e-7a971f453ec4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.805444] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446d349b-f5e7-4df3-9486-33dc2044c97f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.836718] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2362b3ee-3c70-4952-a1db-7cb92c2acecc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.845413] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9342cd-46cb-4b28-872d-d4966b59a184 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2028.860607] env[63024]: DEBUG nova.compute.provider_tree [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2028.945746] env[63024]: DEBUG nova.network.neutron [-] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2029.092865] env[63024]: DEBUG nova.network.neutron [None 
req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Successfully updated port: 7eeea323-6f39-4e16-8603-b463434191f8 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2029.177567] env[63024]: DEBUG nova.compute.manager [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Received event network-vif-deleted-811b1be3-1c07-4661-b336-ef7e0b8b4b54 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2029.177797] env[63024]: DEBUG nova.compute.manager [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Received event network-vif-plugged-7eeea323-6f39-4e16-8603-b463434191f8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2029.178018] env[63024]: DEBUG oslo_concurrency.lockutils [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] Acquiring lock "3cf2726c-2551-4bbd-8032-006062cdcc39-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2029.178748] env[63024]: DEBUG oslo_concurrency.lockutils [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] Lock "3cf2726c-2551-4bbd-8032-006062cdcc39-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2029.179566] env[63024]: DEBUG oslo_concurrency.lockutils [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] Lock "3cf2726c-2551-4bbd-8032-006062cdcc39-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.179799] env[63024]: DEBUG nova.compute.manager [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] No waiting events found dispatching network-vif-plugged-7eeea323-6f39-4e16-8603-b463434191f8 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2029.180021] env[63024]: WARNING nova.compute.manager [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Received unexpected event network-vif-plugged-7eeea323-6f39-4e16-8603-b463434191f8 for instance with vm_state building and task_state spawning. 
[ 2029.180228] env[63024]: DEBUG nova.compute.manager [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Received event network-changed-7eeea323-6f39-4e16-8603-b463434191f8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2029.180462] env[63024]: DEBUG nova.compute.manager [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Refreshing instance network info cache due to event network-changed-7eeea323-6f39-4e16-8603-b463434191f8. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2029.180700] env[63024]: DEBUG oslo_concurrency.lockutils [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] Acquiring lock "refresh_cache-3cf2726c-2551-4bbd-8032-006062cdcc39" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2029.180912] env[63024]: DEBUG oslo_concurrency.lockutils [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] Acquired lock "refresh_cache-3cf2726c-2551-4bbd-8032-006062cdcc39" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2029.181190] env[63024]: DEBUG nova.network.neutron [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Refreshing network info cache for port 7eeea323-6f39-4e16-8603-b463434191f8 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2029.369026] env[63024]: DEBUG nova.scheduler.client.report [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2029.447916] env[63024]: INFO nova.compute.manager [-] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Took 1.35 seconds to deallocate network for instance. [ 2029.595797] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "refresh_cache-3cf2726c-2551-4bbd-8032-006062cdcc39" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2029.717906] env[63024]: DEBUG nova.network.neutron [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2029.804631] env[63024]: DEBUG nova.network.neutron [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2029.872056] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.020s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2029.874426] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.684s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2029.875900] env[63024]: INFO nova.compute.claims [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2029.894783] env[63024]: INFO nova.scheduler.client.report [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Deleted allocations for instance 6d21976b-f519-4c87-a0d2-0a406060608d [ 2029.954257] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2030.306898] env[63024]: DEBUG oslo_concurrency.lockutils [req-c64d8d5e-172a-4f27-883e-3e93f8359d07 req-bb4b74bf-a298-46d4-8079-d3d6f8dbc406 service nova] Releasing lock "refresh_cache-3cf2726c-2551-4bbd-8032-006062cdcc39" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2030.307392] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired lock "refresh_cache-3cf2726c-2551-4bbd-8032-006062cdcc39" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2030.307558] env[63024]: DEBUG nova.network.neutron [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2030.402184] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c57faf6e-f683-4795-a9cc-69c242c896a2 
tempest-ListServersNegativeTestJSON-2082324958 tempest-ListServersNegativeTestJSON-2082324958-project-member] Lock "6d21976b-f519-4c87-a0d2-0a406060608d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.233s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2030.856713] env[63024]: DEBUG nova.network.neutron [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2031.081147] env[63024]: DEBUG nova.network.neutron [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Updating instance_info_cache with network_info: [{"id": "7eeea323-6f39-4e16-8603-b463434191f8", "address": "fa:16:3e:f1:91:37", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eeea323-6f", "ovs_interfaceid": "7eeea323-6f39-4e16-8603-b463434191f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2031.170815] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8af17c7-40cf-474a-96bb-70a545393a77 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.184116] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b0d437-c3f2-4ab3-b9f5-d233be904610 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.221388] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2641b2f0-25ab-4d6e-bd4d-a22baaf8d78a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.229719] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71407072-0590-45cc-9748-389e3279034d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.246449] env[63024]: DEBUG nova.compute.provider_tree [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 
tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2031.589055] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Releasing lock "refresh_cache-3cf2726c-2551-4bbd-8032-006062cdcc39" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2031.589055] env[63024]: DEBUG nova.compute.manager [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Instance network_info: |[{"id": "7eeea323-6f39-4e16-8603-b463434191f8", "address": "fa:16:3e:f1:91:37", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eeea323-6f", "ovs_interfaceid": "7eeea323-6f39-4e16-8603-b463434191f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2031.589055] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:91:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98011432-48cc-4ffd-a5a8-b96d2ea4424a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7eeea323-6f39-4e16-8603-b463434191f8', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2031.596030] env[63024]: DEBUG oslo.service.loopingcall [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2031.596030] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2031.596213] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-beb71662-6a74-4a40-b94e-ec867686fe9b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2031.618188] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2031.618188] env[63024]: value = "task-1951606" [ 2031.618188] env[63024]: _type = "Task" [ 2031.618188] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2031.626704] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951606, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2031.749995] env[63024]: DEBUG nova.scheduler.client.report [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2032.129607] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951606, 'name': CreateVM_Task, 'duration_secs': 0.411788} completed successfully. 
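The CreateVM_Task entries above (the task value being returned, then "progress is N%" polling until "completed successfully") follow oslo.vmware's invoke-then-wait pattern. A sketch assuming `session` is an oslo_vmware.api.VMwareAPISession; the helper name and argument names are illustrative:

def create_vm(session, folder_ref, config_spec, res_pool_ref):
    # Start the vCenter task; CreateVM_Task is invoked on the VM folder.
    task = session.invoke_api(session.vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=res_pool_ref)
    # wait_for_task() polls the task (the "progress is N%" lines above) and
    # raises if vCenter reports the task as failed.
    task_info = session.wait_for_task(task)
    return task_info.result  # managed object reference of the new VM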
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.129831] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2032.131026] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2032.131026] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2032.131222] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2032.131537] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d7c319b1-4665-436f-9a68-3732ed2b0e92 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.137172] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2032.137172] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5251a8ba-2db2-cdd8-1770-59cbb0242f13" [ 2032.137172] env[63024]: _type = "Task" [ 2032.137172] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.146353] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5251a8ba-2db2-cdd8-1770-59cbb0242f13, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.255541] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.381s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2032.256183] env[63024]: DEBUG nova.compute.manager [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2032.259238] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.961s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2032.259499] env[63024]: DEBUG nova.objects.instance [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Lazy-loading 'resources' on Instance uuid ec1f30e6-8410-4687-958f-f4e6e154b52f {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2032.652105] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5251a8ba-2db2-cdd8-1770-59cbb0242f13, 'name': SearchDatastore_Task, 'duration_secs': 0.012483} completed successfully. 
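The lock bookkeeping above ("acquired by ... :: waited N.NNNs", "released ... :: held N.NNNs") is oslo.concurrency's named-lock helper; the "compute_resources", per-instance "<uuid>-events" and image-cache locks all go through it. Two equivalent usages, sketched with made-up function bodies:

from oslo_concurrency import lockutils

def pop_instance_event(instance_uuid, name, events):
    # Context-manager form, mirroring the "<uuid>-events" lock names above.
    with lockutils.lock('%s-events' % instance_uuid):
        return events.pop(name, None)

@lockutils.synchronized('compute_resources')
def update_usage(resource_tracker, instance):
    # Runs with the "compute_resources" lock held; the log records how long
    # each caller waited for the lock and how long it was held.
    pass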
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2032.652492] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2032.652760] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2032.653021] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2032.653200] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2032.653350] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2032.653620] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7d628e1-a7f6-442d-a1e8-b0370934bbbe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.664068] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2032.664272] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Folder [datastore1] devstack-image-cache_base created. 
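The "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk" strings above are ordinary vSphere datastore paths, and the Acquiring/Releasing entries key their locks on exactly those strings. A small reconstruction of how such a path can be assembled (the helper is illustrative, not Nova's own):

def ds_path(datastore, *parts):
    # "[<datastore>] part1/part2/..." is the vSphere datastore path form.
    return '[%s] %s' % (datastore, '/'.join(parts))

image_id = '2646ca61-612e-4bc3-97f7-ee492c048835'
cache_vmdk = ds_path('datastore1', 'devstack-image-cache_base',
                     image_id, '%s.vmdk' % image_id)
# cache_vmdk is the "[datastore1] devstack-image-cache_base/..." string that
# also appears as the lock name in the surrounding entries.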
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2032.665050] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07faa157-c9f0-4951-82da-9f5bf8a9230c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.671412] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2032.671412] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52264b3f-e5a0-2665-0b0c-17a25553429c" [ 2032.671412] env[63024]: _type = "Task" [ 2032.671412] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2032.680629] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52264b3f-e5a0-2665-0b0c-17a25553429c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2032.762328] env[63024]: DEBUG nova.compute.utils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2032.763875] env[63024]: DEBUG nova.compute.manager [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2032.764551] env[63024]: DEBUG nova.network.neutron [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2032.851890] env[63024]: DEBUG nova.policy [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fbaf8a403e044fe09d90dc38d1a2082c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd55de9f4a3a347dab0b8334e2add113f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2033.055588] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4d7f31-830e-4110-b1a7-996f6d007854 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.063608] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a33bb0-465f-4ff9-81c8-b536d8dfbb5a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.093143] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c3a5466-7b00-411e-9967-1a2cf3ed3e7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.103026] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-854d2537-6967-4b56-b170-0c1ca95845bc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.113957] env[63024]: DEBUG nova.compute.provider_tree [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2033.133709] env[63024]: DEBUG nova.network.neutron [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Successfully created port: a86b6bec-4924-4d56-ace2-6c00a9a8113c {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2033.186021] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52264b3f-e5a0-2665-0b0c-17a25553429c, 'name': SearchDatastore_Task, 'duration_secs': 0.01006} completed successfully. 
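The "Policy check for network:attach_external_network failed with credentials {...}" entry above is an oslo.policy authorization quietly returning False for this non-admin tenant. A minimal sketch of registering and evaluating such a rule; the 'role:admin' default is an assumption for illustration, not Nova's actual policy string:

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

def can_attach_external_network(credentials):
    # do_raise=False makes enforce() return a boolean instead of raising
    # PolicyNotAuthorized, matching the quiet DEBUG "failed" line above.
    return enforcer.enforce('network:attach_external_network', {},
                            credentials, do_raise=False)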
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.186021] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-424525a8-5e17-4b94-a91c-736ad8f89fb0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.190849] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2033.190849] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523b91b4-9b9e-e1e9-7a36-d3fa92cf3a32" [ 2033.190849] env[63024]: _type = "Task" [ 2033.190849] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.200860] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523b91b4-9b9e-e1e9-7a36-d3fa92cf3a32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2033.269807] env[63024]: DEBUG nova.compute.manager [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2033.617951] env[63024]: DEBUG nova.scheduler.client.report [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2033.703025] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523b91b4-9b9e-e1e9-7a36-d3fa92cf3a32, 'name': SearchDatastore_Task, 'duration_secs': 0.028112} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2033.703025] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2033.703025] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 3cf2726c-2551-4bbd-8032-006062cdcc39/3cf2726c-2551-4bbd-8032-006062cdcc39.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2033.703025] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e22f8923-ea03-4b0d-9d3c-cc983aeb58cf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2033.710116] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2033.710116] env[63024]: value = "task-1951607" [ 2033.710116] env[63024]: _type = "Task" [ 2033.710116] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2033.718344] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951607, 'name': CopyVirtualDisk_Task} progress is 0%. 
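For the inventory records repeated above, usable capacity per resource class follows the usual placement arithmetic, (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations. Worked through for the values in the log (a sketch of the arithmetic only, not the placement service's code):

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

def capacity(inv):
    return int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])

# VCPU -> 192, MEMORY_MB -> 196078, DISK_GB -> 400
for resource_class, inv in inventory.items():
    print(resource_class, capacity(inv))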
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.124784] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.865s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.127425] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 31.658s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.127425] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.127425] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2034.127837] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 30.612s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.129792] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f45bfb2-0576-48d1-9c6d-82e8ac5d056f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.140276] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfbea3f2-8e8a-46c8-a0fe-2c0c408c33ee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.157629] env[63024]: INFO nova.scheduler.client.report [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Deleted allocations for instance ec1f30e6-8410-4687-958f-f4e6e154b52f [ 2034.159299] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-340b9558-edf4-4971-ad24-753807e23f84 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.171731] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abfb04b-375d-4196-a64a-01de6e3faac4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.206211] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: 
name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=178389MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2034.206461] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2034.221853] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951607, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.277939] env[63024]: DEBUG nova.compute.manager [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2034.306976] env[63024]: DEBUG nova.virt.hardware [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2034.307269] env[63024]: DEBUG nova.virt.hardware [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2034.307441] env[63024]: DEBUG nova.virt.hardware [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2034.307670] env[63024]: DEBUG nova.virt.hardware [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2034.307821] env[63024]: DEBUG nova.virt.hardware [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 
tempest-InstanceActionsV221TestJSON-1344727511-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2034.307985] env[63024]: DEBUG nova.virt.hardware [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2034.308212] env[63024]: DEBUG nova.virt.hardware [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2034.308408] env[63024]: DEBUG nova.virt.hardware [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2034.308611] env[63024]: DEBUG nova.virt.hardware [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2034.308815] env[63024]: DEBUG nova.virt.hardware [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2034.308958] env[63024]: DEBUG nova.virt.hardware [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2034.309873] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6046a16-fa44-45e1-bda4-209251b7c7b5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.319469] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819c0949-c234-4910-bf2a-7e0fb5befa7e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.608153] env[63024]: DEBUG nova.compute.manager [req-b640ccc6-a4ac-4e76-912a-f030068766dd req-206cc0a0-4d83-4341-abec-396520d87164 service nova] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Received event network-vif-plugged-a86b6bec-4924-4d56-ace2-6c00a9a8113c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2034.608486] env[63024]: DEBUG oslo_concurrency.lockutils [req-b640ccc6-a4ac-4e76-912a-f030068766dd req-206cc0a0-4d83-4341-abec-396520d87164 service nova] Acquiring lock "df5a197c-8e35-44a0-8b9c-63dae50b77ff-events" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2034.608759] env[63024]: DEBUG oslo_concurrency.lockutils [req-b640ccc6-a4ac-4e76-912a-f030068766dd req-206cc0a0-4d83-4341-abec-396520d87164 service nova] Lock "df5a197c-8e35-44a0-8b9c-63dae50b77ff-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2034.608963] env[63024]: DEBUG oslo_concurrency.lockutils [req-b640ccc6-a4ac-4e76-912a-f030068766dd req-206cc0a0-4d83-4341-abec-396520d87164 service nova] Lock "df5a197c-8e35-44a0-8b9c-63dae50b77ff-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.609854] env[63024]: DEBUG nova.compute.manager [req-b640ccc6-a4ac-4e76-912a-f030068766dd req-206cc0a0-4d83-4341-abec-396520d87164 service nova] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] No waiting events found dispatching network-vif-plugged-a86b6bec-4924-4d56-ace2-6c00a9a8113c {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2034.609854] env[63024]: WARNING nova.compute.manager [req-b640ccc6-a4ac-4e76-912a-f030068766dd req-206cc0a0-4d83-4341-abec-396520d87164 service nova] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Received unexpected event network-vif-plugged-a86b6bec-4924-4d56-ace2-6c00a9a8113c for instance with vm_state building and task_state spawning. [ 2034.672222] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f6acf84-01b1-4e66-9eed-dce0ef58b22e tempest-ServersAaction247Test-1934225695 tempest-ServersAaction247Test-1934225695-project-member] Lock "ec1f30e6-8410-4687-958f-f4e6e154b52f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.772s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2034.728414] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951607, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.735519} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2034.728693] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 3cf2726c-2551-4bbd-8032-006062cdcc39/3cf2726c-2551-4bbd-8032-006062cdcc39.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2034.729178] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2034.729457] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d1a40b72-9f9f-4093-bdb8-383eba0c09af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.739998] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2034.739998] env[63024]: value = "task-1951608" [ 2034.739998] env[63024]: _type = "Task" [ 2034.739998] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2034.751424] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951608, 'name': ExtendVirtualDisk_Task} progress is 0%. 
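The "Extending root virtual disk to 1048576" figure above is the m1.nano flavor's root_gb=1 expressed in KiB, the capacity unit the extend task is given here; the resize is only performed when the image is smaller than that. The conversion, worked out:

root_gb = 1                                # m1.nano root disk, from the flavor
requested_size_kb = root_gb * 1024 * 1024  # 1 GiB == 1,048,576 KiB
assert requested_size_kb == 1048576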
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2034.796589] env[63024]: DEBUG nova.network.neutron [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Successfully updated port: a86b6bec-4924-4d56-ace2-6c00a9a8113c {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2034.976039] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ea32420-269b-494f-8b89-0504fb5eb0f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2034.984810] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67b0bb4c-3f34-483a-b7fe-ce1bf73c03b5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.019489] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a64c86b-4aed-4ce5-a5f1-093b86ff6980 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.028311] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec484497-2990-4cb7-a853-20a50ab8cd17 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.042464] env[63024]: DEBUG nova.compute.provider_tree [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2035.251315] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951608, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081977} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.251769] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2035.252367] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65cbced6-5320-4918-a2b2-c0db9834a4fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.275238] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] 3cf2726c-2551-4bbd-8032-006062cdcc39/3cf2726c-2551-4bbd-8032-006062cdcc39.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2035.275540] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41a2733d-f98b-46ff-ac80-18710d5b9ec1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.296944] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2035.296944] env[63024]: value = "task-1951609" [ 2035.296944] env[63024]: _type = "Task" [ 2035.296944] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.300493] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Acquiring lock "refresh_cache-df5a197c-8e35-44a0-8b9c-63dae50b77ff" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2035.300640] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Acquired lock "refresh_cache-df5a197c-8e35-44a0-8b9c-63dae50b77ff" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2035.300809] env[63024]: DEBUG nova.network.neutron [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2035.308882] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951609, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.545681] env[63024]: DEBUG nova.scheduler.client.report [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2035.811722] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951609, 'name': ReconfigVM_Task, 'duration_secs': 0.310035} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2035.811999] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Reconfigured VM instance instance-00000063 to attach disk [datastore1] 3cf2726c-2551-4bbd-8032-006062cdcc39/3cf2726c-2551-4bbd-8032-006062cdcc39.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2035.813488] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e412be44-a139-4054-a3ec-de7abc010fb9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2035.820895] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2035.820895] env[63024]: value = "task-1951610" [ 2035.820895] env[63024]: _type = "Task" [ 2035.820895] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2035.830021] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951610, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2035.845484] env[63024]: DEBUG nova.network.neutron [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2036.076478] env[63024]: DEBUG nova.network.neutron [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Updating instance_info_cache with network_info: [{"id": "a86b6bec-4924-4d56-ace2-6c00a9a8113c", "address": "fa:16:3e:8f:5c:a2", "network": {"id": "1a612d95-706b-463f-b8e5-4d39625bf560", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1640035677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d55de9f4a3a347dab0b8334e2add113f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa86b6bec-49", "ovs_interfaceid": "a86b6bec-4924-4d56-ace2-6c00a9a8113c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2036.333065] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951610, 'name': Rename_Task, 'duration_secs': 0.156485} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2036.333357] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2036.333670] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f67930c7-074d-4bf5-bb34-1d81b9facee5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.341433] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2036.341433] env[63024]: value = "task-1951611" [ 2036.341433] env[63024]: _type = "Task" [ 2036.341433] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.349861] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951611, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.559021] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.429s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2036.559352] env[63024]: DEBUG nova.compute.manager [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. {{(pid=63024) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5239}} [ 2036.563590] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.592s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2036.567027] env[63024]: DEBUG nova.objects.instance [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lazy-loading 'resources' on Instance uuid c28e7c21-7e7d-4cda-81e8-63538bd8a1f7 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2036.579839] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Releasing lock "refresh_cache-df5a197c-8e35-44a0-8b9c-63dae50b77ff" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2036.581594] env[63024]: DEBUG nova.compute.manager [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Instance network_info: |[{"id": "a86b6bec-4924-4d56-ace2-6c00a9a8113c", "address": "fa:16:3e:8f:5c:a2", "network": {"id": "1a612d95-706b-463f-b8e5-4d39625bf560", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1640035677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d55de9f4a3a347dab0b8334e2add113f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa86b6bec-49", "ovs_interfaceid": "a86b6bec-4924-4d56-ace2-6c00a9a8113c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2036.581594] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8f:5c:a2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8812601-ae67-4e0d-b9a2-710b86c53ac5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a86b6bec-4924-4d56-ace2-6c00a9a8113c', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2036.591345] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Creating folder: Project (d55de9f4a3a347dab0b8334e2add113f). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2036.592482] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c553ae4a-7106-4a99-b2e5-fc957ff49344 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.607366] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Created folder: Project (d55de9f4a3a347dab0b8334e2add113f) in parent group-v401959. [ 2036.607366] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Creating folder: Instances. Parent ref: group-v402226. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2036.608025] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-788eca79-9035-4c65-9ffe-f7f353fc324e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.621414] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Created folder: Instances in parent group-v402226. [ 2036.621414] env[63024]: DEBUG oslo.service.loopingcall [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2036.621612] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2036.621831] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-27724780-648e-4c55-9161-1c1050fd10ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2036.646848] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2036.646848] env[63024]: value = "task-1951614" [ 2036.646848] env[63024]: _type = "Task" [ 2036.646848] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2036.660500] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951614, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2036.721886] env[63024]: DEBUG nova.compute.manager [req-2482c33c-90b3-4099-8780-4bf2f1707757 req-e4f9abcc-31fd-4d3a-a4dc-d7d7e3937628 service nova] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Received event network-changed-a86b6bec-4924-4d56-ace2-6c00a9a8113c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2036.722104] env[63024]: DEBUG nova.compute.manager [req-2482c33c-90b3-4099-8780-4bf2f1707757 req-e4f9abcc-31fd-4d3a-a4dc-d7d7e3937628 service nova] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Refreshing instance network info cache due to event network-changed-a86b6bec-4924-4d56-ace2-6c00a9a8113c. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2036.722382] env[63024]: DEBUG oslo_concurrency.lockutils [req-2482c33c-90b3-4099-8780-4bf2f1707757 req-e4f9abcc-31fd-4d3a-a4dc-d7d7e3937628 service nova] Acquiring lock "refresh_cache-df5a197c-8e35-44a0-8b9c-63dae50b77ff" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2036.722589] env[63024]: DEBUG oslo_concurrency.lockutils [req-2482c33c-90b3-4099-8780-4bf2f1707757 req-e4f9abcc-31fd-4d3a-a4dc-d7d7e3937628 service nova] Acquired lock "refresh_cache-df5a197c-8e35-44a0-8b9c-63dae50b77ff" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2036.722791] env[63024]: DEBUG nova.network.neutron [req-2482c33c-90b3-4099-8780-4bf2f1707757 req-e4f9abcc-31fd-4d3a-a4dc-d7d7e3937628 service nova] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Refreshing network info cache for port a86b6bec-4924-4d56-ace2-6c00a9a8113c {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2036.854411] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951611, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.134695] env[63024]: INFO nova.scheduler.client.report [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleted allocation for migration 8ae5334b-067b-4768-9988-eb18b89ae1b7 [ 2037.158238] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951614, 'name': CreateVM_Task, 'duration_secs': 0.501811} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.158830] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2037.159111] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.159278] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2037.159590] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2037.159845] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe9685ea-4d9e-4f85-913a-917311409bf7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.165026] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Waiting for the task: (returnval){ [ 2037.165026] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f723d0-b76f-215d-a810-eacee005f46b" [ 2037.165026] env[63024]: _type = "Task" [ 2037.165026] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.174863] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f723d0-b76f-215d-a810-eacee005f46b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.350971] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c201aa80-a0f9-417a-9c3f-a9b7a783455d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.356893] env[63024]: DEBUG oslo_vmware.api [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951611, 'name': PowerOnVM_Task, 'duration_secs': 0.521263} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.357717] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2037.358100] env[63024]: INFO nova.compute.manager [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Took 9.37 seconds to spawn the instance on the hypervisor. [ 2037.358160] env[63024]: DEBUG nova.compute.manager [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2037.358896] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf64222-ff7f-4b64-ab8e-8a689302a08e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.364125] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f64b0232-ef3a-4b0a-a7dd-a8f42771bb01 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.409717] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ecbeffd-d481-421a-8398-06a75a1cc896 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.420215] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7726583d-cb0d-47dd-8052-904b60b4a5c6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.433852] env[63024]: DEBUG nova.compute.provider_tree [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2037.537776] env[63024]: DEBUG nova.network.neutron [req-2482c33c-90b3-4099-8780-4bf2f1707757 req-e4f9abcc-31fd-4d3a-a4dc-d7d7e3937628 service nova] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Updated VIF entry in instance network info cache for port 
a86b6bec-4924-4d56-ace2-6c00a9a8113c. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2037.538167] env[63024]: DEBUG nova.network.neutron [req-2482c33c-90b3-4099-8780-4bf2f1707757 req-e4f9abcc-31fd-4d3a-a4dc-d7d7e3937628 service nova] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Updating instance_info_cache with network_info: [{"id": "a86b6bec-4924-4d56-ace2-6c00a9a8113c", "address": "fa:16:3e:8f:5c:a2", "network": {"id": "1a612d95-706b-463f-b8e5-4d39625bf560", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1640035677-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d55de9f4a3a347dab0b8334e2add113f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8812601-ae67-4e0d-b9a2-710b86c53ac5", "external-id": "nsx-vlan-transportzone-85", "segmentation_id": 85, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa86b6bec-49", "ovs_interfaceid": "a86b6bec-4924-4d56-ace2-6c00a9a8113c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2037.639887] env[63024]: DEBUG oslo_concurrency.lockutils [None req-85666f53-2554-4727-8012-c761cf54ebd6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "fe6847e2-a742-4338-983f-698c13aaefde" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 37.328s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2037.679842] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f723d0-b76f-215d-a810-eacee005f46b, 'name': SearchDatastore_Task, 'duration_secs': 0.011266} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2037.680401] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2037.680754] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2037.681221] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2037.681479] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2037.681858] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2037.682223] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d511ff4-f270-405d-864e-046e19820cb8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.693139] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2037.693380] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2037.694129] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cca56bb-e610-4b53-814e-99a262765675 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2037.700502] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Waiting for the task: (returnval){ [ 2037.700502] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520850db-1391-7b96-6818-b70743f298f1" [ 2037.700502] env[63024]: _type = "Task" [ 2037.700502] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2037.709251] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520850db-1391-7b96-6818-b70743f298f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2037.888023] env[63024]: INFO nova.compute.manager [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Took 40.83 seconds to build instance. [ 2037.940782] env[63024]: DEBUG nova.scheduler.client.report [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2038.041155] env[63024]: DEBUG oslo_concurrency.lockutils [req-2482c33c-90b3-4099-8780-4bf2f1707757 req-e4f9abcc-31fd-4d3a-a4dc-d7d7e3937628 service nova] Releasing lock "refresh_cache-df5a197c-8e35-44a0-8b9c-63dae50b77ff" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2038.215134] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520850db-1391-7b96-6818-b70743f298f1, 'name': SearchDatastore_Task, 'duration_secs': 0.012165} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.215276] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0197d99f-d634-4d60-b63e-776bc18c01b2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.222250] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Waiting for the task: (returnval){ [ 2038.222250] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52756c03-c1dd-43ce-2f24-0ec48b1362eb" [ 2038.222250] env[63024]: _type = "Task" [ 2038.222250] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.231410] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52756c03-c1dd-43ce-2f24-0ec48b1362eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.392489] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1d8e2ae8-f84b-4a8e-9a1f-fcb457dedd2a tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "3cf2726c-2551-4bbd-8032-006062cdcc39" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.343s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.450679] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.887s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2038.455850] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 28.363s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2038.483340] env[63024]: INFO nova.scheduler.client.report [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted allocations for instance c28e7c21-7e7d-4cda-81e8-63538bd8a1f7 [ 2038.611729] env[63024]: DEBUG nova.objects.instance [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'flavor' on Instance uuid fe6847e2-a742-4338-983f-698c13aaefde {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2038.733333] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: 
{'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52756c03-c1dd-43ce-2f24-0ec48b1362eb, 'name': SearchDatastore_Task, 'duration_secs': 0.011129} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2038.733630] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2038.733893] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] df5a197c-8e35-44a0-8b9c-63dae50b77ff/df5a197c-8e35-44a0-8b9c-63dae50b77ff.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2038.734175] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f46ea8c-634d-404c-bc0c-2e50a08203d1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.743258] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Waiting for the task: (returnval){ [ 2038.743258] env[63024]: value = "task-1951615" [ 2038.743258] env[63024]: _type = "Task" [ 2038.743258] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.752204] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951615, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2038.970109] env[63024]: INFO nova.compute.claims [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2038.993080] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2be02205-f9a8-48b7-bbd5-e6b506acadd1 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "c28e7c21-7e7d-4cda-81e8-63538bd8a1f7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.450s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2039.119918] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.119918] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.119918] env[63024]: DEBUG nova.network.neutron [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2039.119918] env[63024]: DEBUG nova.objects.instance [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'info_cache' on Instance uuid fe6847e2-a742-4338-983f-698c13aaefde {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2039.259351] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951615, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512964} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.261158] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] df5a197c-8e35-44a0-8b9c-63dae50b77ff/df5a197c-8e35-44a0-8b9c-63dae50b77ff.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2039.261158] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2039.261158] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-86b999fb-5b46-4818-b825-e9f921d92f61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.272814] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Waiting for the task: (returnval){ [ 2039.272814] env[63024]: value = "task-1951616" [ 2039.272814] env[63024]: _type = "Task" [ 2039.272814] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.283759] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951616, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.476041] env[63024]: INFO nova.compute.resource_tracker [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating resource usage from migration b427e72e-174f-4ebf-b6c0-a9329bf591f0 [ 2039.622613] env[63024]: DEBUG nova.objects.base [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2039.785610] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951616, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072455} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.787714] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2039.788560] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7895574-aed1-49d9-8d7f-9bde97a696b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.792367] env[63024]: DEBUG nova.compute.manager [req-1ae34387-fa1d-447c-9c27-efa3a83d8558 req-4dfca6b3-d4cc-4547-b063-c3e211099c0d service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Received event network-changed-7eeea323-6f39-4e16-8603-b463434191f8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2039.792367] env[63024]: DEBUG nova.compute.manager [req-1ae34387-fa1d-447c-9c27-efa3a83d8558 req-4dfca6b3-d4cc-4547-b063-c3e211099c0d service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Refreshing instance network info cache due to event network-changed-7eeea323-6f39-4e16-8603-b463434191f8. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2039.792367] env[63024]: DEBUG oslo_concurrency.lockutils [req-1ae34387-fa1d-447c-9c27-efa3a83d8558 req-4dfca6b3-d4cc-4547-b063-c3e211099c0d service nova] Acquiring lock "refresh_cache-3cf2726c-2551-4bbd-8032-006062cdcc39" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2039.792685] env[63024]: DEBUG oslo_concurrency.lockutils [req-1ae34387-fa1d-447c-9c27-efa3a83d8558 req-4dfca6b3-d4cc-4547-b063-c3e211099c0d service nova] Acquired lock "refresh_cache-3cf2726c-2551-4bbd-8032-006062cdcc39" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2039.792885] env[63024]: DEBUG nova.network.neutron [req-1ae34387-fa1d-447c-9c27-efa3a83d8558 req-4dfca6b3-d4cc-4547-b063-c3e211099c0d service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Refreshing network info cache for port 7eeea323-6f39-4e16-8603-b463434191f8 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2039.797118] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edea8e0-7045-4bb0-bdcd-0ab445139a26 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.816056] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ec838e-8a81-467e-8244-523a9a654d5a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.828268] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] df5a197c-8e35-44a0-8b9c-63dae50b77ff/df5a197c-8e35-44a0-8b9c-63dae50b77ff.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2039.829875] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-489e388e-3654-47e3-82a6-f7df65d2fb4d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.875712] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6378ecf6-70b3-48fa-99fd-39b950ea9869 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.880054] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Waiting for the task: (returnval){ [ 2039.880054] env[63024]: value = "task-1951617" [ 2039.880054] env[63024]: _type = "Task" [ 2039.880054] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.891113] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f55efc1-1489-4274-b775-2e9f5213af76 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.896137] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951617, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.909105] env[63024]: DEBUG nova.compute.provider_tree [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2040.393641] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951617, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.418058] env[63024]: DEBUG nova.scheduler.client.report [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2040.626301] env[63024]: DEBUG nova.network.neutron [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance_info_cache with network_info: [{"id": "a86b5113-d05e-45ac-bf54-833ea769eae5", "address": "fa:16:3e:7e:0d:a2", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa86b5113-d0", "ovs_interfaceid": "a86b5113-d05e-45ac-bf54-833ea769eae5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.799455] env[63024]: DEBUG nova.network.neutron [req-1ae34387-fa1d-447c-9c27-efa3a83d8558 req-4dfca6b3-d4cc-4547-b063-c3e211099c0d service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Updated VIF entry in instance network info cache for port 7eeea323-6f39-4e16-8603-b463434191f8. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2040.799849] env[63024]: DEBUG nova.network.neutron [req-1ae34387-fa1d-447c-9c27-efa3a83d8558 req-4dfca6b3-d4cc-4547-b063-c3e211099c0d service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Updating instance_info_cache with network_info: [{"id": "7eeea323-6f39-4e16-8603-b463434191f8", "address": "fa:16:3e:f1:91:37", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.204", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7eeea323-6f", "ovs_interfaceid": "7eeea323-6f39-4e16-8603-b463434191f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.892809] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951617, 'name': ReconfigVM_Task, 'duration_secs': 0.944511} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2040.893216] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Reconfigured VM instance instance-00000064 to attach disk [datastore1] df5a197c-8e35-44a0-8b9c-63dae50b77ff/df5a197c-8e35-44a0-8b9c-63dae50b77ff.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2040.894061] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4cb7ad5-354d-4927-ad1f-7b73c050967f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.901298] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Waiting for the task: (returnval){ [ 2040.901298] env[63024]: value = "task-1951618" [ 2040.901298] env[63024]: _type = "Task" [ 2040.901298] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2040.909202] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951618, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2040.921949] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.466s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2040.921949] env[63024]: INFO nova.compute.manager [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Migrating [ 2040.932628] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.590s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2040.932867] env[63024]: DEBUG nova.objects.instance [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lazy-loading 'pci_requests' on Instance uuid 92d1f96e-bbe7-4654-9d3a-47ba40057157 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2041.130020] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-fe6847e2-a742-4338-983f-698c13aaefde" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2041.208257] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "0d253199-adf8-45c0-a6bf-b11c12b08688" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2041.208485] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "0d253199-adf8-45c0-a6bf-b11c12b08688" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2041.303717] env[63024]: DEBUG oslo_concurrency.lockutils [req-1ae34387-fa1d-447c-9c27-efa3a83d8558 req-4dfca6b3-d4cc-4547-b063-c3e211099c0d service nova] Releasing lock "refresh_cache-3cf2726c-2551-4bbd-8032-006062cdcc39" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2041.411045] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951618, 'name': Rename_Task, 'duration_secs': 0.500816} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.411379] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2041.411649] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a570ea12-4db2-48b7-b76d-b1dfcd734cd2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.418528] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Waiting for the task: (returnval){ [ 2041.418528] env[63024]: value = "task-1951619" [ 2041.418528] env[63024]: _type = "Task" [ 2041.418528] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2041.429731] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951619, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2041.436552] env[63024]: DEBUG nova.objects.instance [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lazy-loading 'numa_topology' on Instance uuid 92d1f96e-bbe7-4654-9d3a-47ba40057157 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2041.439470] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2041.439561] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2041.439758] env[63024]: DEBUG nova.network.neutron [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2041.710849] env[63024]: DEBUG nova.compute.manager [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2041.929389] env[63024]: DEBUG oslo_vmware.api [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951619, 'name': PowerOnVM_Task, 'duration_secs': 0.456326} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2041.929607] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2041.929813] env[63024]: INFO nova.compute.manager [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Took 7.65 seconds to spawn the instance on the hypervisor. 
[ 2041.930050] env[63024]: DEBUG nova.compute.manager [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2041.930763] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e587aaa4-f148-4855-b582-3414a5b75995 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2041.939662] env[63024]: INFO nova.compute.claims [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2042.137080] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2042.137421] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f179e659-7a9c-4518-983c-0812bc3381e7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2042.147915] env[63024]: DEBUG oslo_vmware.api [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2042.147915] env[63024]: value = "task-1951620" [ 2042.147915] env[63024]: _type = "Task" [ 2042.147915] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2042.157412] env[63024]: DEBUG oslo_vmware.api [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951620, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.236580] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2042.295550] env[63024]: DEBUG nova.network.neutron [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance_info_cache with network_info: [{"id": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "address": "fa:16:3e:2b:cc:65", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e0e9732-b3", "ovs_interfaceid": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2042.448361] env[63024]: INFO nova.compute.manager [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Took 40.28 seconds to build instance. [ 2042.659290] env[63024]: DEBUG oslo_vmware.api [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951620, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2042.798015] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2042.951700] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c7cdaf5c-2e63-4709-8975-ac0c68052509 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Lock "df5a197c-8e35-44a0-8b9c-63dae50b77ff" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.791s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.170964] env[63024]: DEBUG oslo_vmware.api [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951620, 'name': PowerOnVM_Task, 'duration_secs': 0.794797} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2043.176107] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2043.176502] env[63024]: DEBUG nova.compute.manager [None req-b6086a9f-6a6b-4617-9d55-0b48d51a2e99 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2043.178413] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb40b2f-e14f-4cea-a51d-350dcd336a17 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.284406] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Acquiring lock "df5a197c-8e35-44a0-8b9c-63dae50b77ff" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2043.284406] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Lock "df5a197c-8e35-44a0-8b9c-63dae50b77ff" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2043.284619] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Acquiring lock 
"df5a197c-8e35-44a0-8b9c-63dae50b77ff-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2043.284845] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Lock "df5a197c-8e35-44a0-8b9c-63dae50b77ff-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2043.285151] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Lock "df5a197c-8e35-44a0-8b9c-63dae50b77ff-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2043.287639] env[63024]: INFO nova.compute.manager [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Terminating instance [ 2043.312018] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7edca2ab-1a64-463f-ab80-ea9e754f4746 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.320215] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a35a7927-52b1-4c17-ad20-e80ab0a1ab57 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.354828] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f7ffb7-0b8e-44c5-8d20-8a67124e1992 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.364051] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5649b8d-93ff-45aa-96d5-67549fd1d36c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.380604] env[63024]: DEBUG nova.compute.provider_tree [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2043.795177] env[63024]: DEBUG nova.compute.manager [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2043.795515] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2043.796327] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0095d6a-8517-4f5b-b373-a188e21c05ec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.809826] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2043.810301] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22a1f86a-a1ef-48db-9904-90fdc5388ccc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2043.819877] env[63024]: DEBUG oslo_vmware.api [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Waiting for the task: (returnval){ [ 2043.819877] env[63024]: value = "task-1951621" [ 2043.819877] env[63024]: _type = "Task" [ 2043.819877] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2043.831175] env[63024]: DEBUG oslo_vmware.api [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951621, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2043.884065] env[63024]: DEBUG nova.scheduler.client.report [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2044.317111] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa73d6e9-d13f-4127-aa1c-cd0d7e1a5411 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.339297] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance '9716d592-32d1-4f1d-b42b-1c8a7d81d2f2' progress to 0 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2044.350323] env[63024]: DEBUG oslo_vmware.api [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951621, 'name': PowerOffVM_Task, 'duration_secs': 0.219982} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2044.350737] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2044.351051] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2044.351327] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc87566a-a5fe-4130-aff9-6110d71ed357 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.389369] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.457s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.392710] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.283s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.392952] env[63024]: DEBUG nova.objects.instance [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'resources' on Instance uuid 73db94b8-cfa8-4457-bccb-d4b780edbd93 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2044.465614] env[63024]: INFO nova.network.neutron [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updating port 05bc00c8-444d-425a-8c1e-0d34f269c7e8 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2044.532792] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2044.533140] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2044.533345] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Deleting the datastore file [datastore1] df5a197c-8e35-44a0-8b9c-63dae50b77ff {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2044.533633] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-498df2f5-ab3f-4a26-9c21-5501eb51e61c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.542099] env[63024]: DEBUG oslo_vmware.api [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Waiting for the task: (returnval){ [ 2044.542099] env[63024]: value = "task-1951623" [ 2044.542099] env[63024]: _type = "Task" [ 2044.542099] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.551994] env[63024]: DEBUG oslo_vmware.api [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951623, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.846141] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2044.846572] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-be2206d4-0db4-45b0-9bb1-51e1cd7bb07c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2044.856167] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2044.856167] env[63024]: value = "task-1951624" [ 2044.856167] env[63024]: _type = "Task" [ 2044.856167] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2044.865773] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951624, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2044.902622] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "fe6847e2-a742-4338-983f-698c13aaefde" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2044.903053] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "fe6847e2-a742-4338-983f-698c13aaefde" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.903389] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "fe6847e2-a742-4338-983f-698c13aaefde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2044.903665] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "fe6847e2-a742-4338-983f-698c13aaefde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2044.903881] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "fe6847e2-a742-4338-983f-698c13aaefde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2044.906447] env[63024]: INFO nova.compute.manager [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Terminating instance [ 2045.053243] env[63024]: DEBUG oslo_vmware.api [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Task: {'id': task-1951623, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149386} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.056196] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2045.056422] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2045.056811] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2045.056811] env[63024]: INFO nova.compute.manager [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Took 1.26 seconds to destroy the instance on the hypervisor. [ 2045.056987] env[63024]: DEBUG oslo.service.loopingcall [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2045.057389] env[63024]: DEBUG nova.compute.manager [-] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2045.057491] env[63024]: DEBUG nova.network.neutron [-] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2045.188779] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-397cc9db-b88f-4329-8193-c01242eef4b5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.198309] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01b744ba-c825-4fd9-8965-fd371bd8ea88 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.235199] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d6b432-98b7-472f-a9fd-98a96d19f70c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.243812] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed0d2f1d-0137-4c3c-b270-eabec98acda7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.259254] env[63024]: DEBUG nova.compute.provider_tree [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2045.369623] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951624, 'name': PowerOffVM_Task, 'duration_secs': 0.205504} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.369772] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2045.370051] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance '9716d592-32d1-4f1d-b42b-1c8a7d81d2f2' progress to 17 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2045.411603] env[63024]: DEBUG nova.compute.manager [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2045.411603] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2045.412121] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56fbfccd-b206-4bfc-bb9a-e8788e1075b3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.423579] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2045.423888] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-209080f6-202f-4a84-a289-b34a7944380f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.432942] env[63024]: DEBUG oslo_vmware.api [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2045.432942] env[63024]: value = "task-1951625" [ 2045.432942] env[63024]: _type = "Task" [ 2045.432942] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.442154] env[63024]: DEBUG oslo_vmware.api [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951625, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.448270] env[63024]: DEBUG nova.compute.manager [req-4aa0eae8-0f42-41a2-bb4f-80f7bd45f35c req-31df75e1-10dc-4d88-99e0-68ea961926ea service nova] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Received event network-vif-deleted-a86b6bec-4924-4d56-ace2-6c00a9a8113c {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2045.448525] env[63024]: INFO nova.compute.manager [req-4aa0eae8-0f42-41a2-bb4f-80f7bd45f35c req-31df75e1-10dc-4d88-99e0-68ea961926ea service nova] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Neutron deleted interface a86b6bec-4924-4d56-ace2-6c00a9a8113c; detaching it from the instance and deleting it from the info cache [ 2045.448750] env[63024]: DEBUG nova.network.neutron [req-4aa0eae8-0f42-41a2-bb4f-80f7bd45f35c req-31df75e1-10dc-4d88-99e0-68ea961926ea service nova] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2045.763015] env[63024]: DEBUG nova.scheduler.client.report [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2045.878357] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2045.878667] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2045.878824] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2045.879016] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 
tempest-ServerActionsTestJSON-64550346-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2045.879182] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2045.879336] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2045.879548] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2045.879711] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2045.879880] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2045.880054] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2045.880235] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2045.885266] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b6af969b-7e34-4701-9361-2da6a643b1da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.896033] env[63024]: DEBUG nova.network.neutron [-] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2045.905196] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2045.905196] env[63024]: value = "task-1951626" [ 2045.905196] env[63024]: _type = "Task" [ 2045.905196] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2045.914244] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951626, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2045.946823] env[63024]: DEBUG oslo_vmware.api [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951625, 'name': PowerOffVM_Task, 'duration_secs': 0.198246} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2045.947156] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2045.947337] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2045.947605] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-41f7b124-1b2e-4c36-b876-fef4ea15ad82 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.951199] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e7e6e79b-2727-4683-9940-8ff62026ee67 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.962204] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ddc726f-a80a-4478-8d25-dc092d3029db {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2045.996577] env[63024]: DEBUG nova.compute.manager [req-4aa0eae8-0f42-41a2-bb4f-80f7bd45f35c req-31df75e1-10dc-4d88-99e0-68ea961926ea service nova] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Detach interface failed, port_id=a86b6bec-4924-4d56-ace2-6c00a9a8113c, reason: Instance df5a197c-8e35-44a0-8b9c-63dae50b77ff could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2046.147299] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2046.147486] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2046.147665] env[63024]: DEBUG nova.network.neutron [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2046.158429] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2046.158659] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2046.158848] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleting the datastore file [datastore1] fe6847e2-a742-4338-983f-698c13aaefde {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2046.159537] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-adde3b89-471c-423a-85eb-9781b656e176 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.168231] env[63024]: DEBUG oslo_vmware.api [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2046.168231] env[63024]: value = "task-1951628" [ 2046.168231] env[63024]: _type = "Task" [ 2046.168231] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.177626] env[63024]: DEBUG oslo_vmware.api [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951628, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2046.268105] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.876s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.271203] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.867s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2046.271203] env[63024]: DEBUG nova.objects.instance [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Lazy-loading 'resources' on Instance uuid a694e49c-37c5-483f-b1d8-5426f6a52b73 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2046.291347] env[63024]: INFO nova.scheduler.client.report [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Deleted allocations for instance 73db94b8-cfa8-4457-bccb-d4b780edbd93 [ 2046.399066] env[63024]: INFO nova.compute.manager [-] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Took 1.34 seconds to deallocate network for instance. [ 2046.417165] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951626, 'name': ReconfigVM_Task, 'duration_secs': 0.250671} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.417486] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance '9716d592-32d1-4f1d-b42b-1c8a7d81d2f2' progress to 33 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2046.679549] env[63024]: DEBUG oslo_vmware.api [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951628, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151611} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2046.679847] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2046.680050] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2046.680254] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2046.680439] env[63024]: INFO nova.compute.manager [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Took 1.27 seconds to destroy the instance on the hypervisor. [ 2046.680736] env[63024]: DEBUG oslo.service.loopingcall [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2046.680952] env[63024]: DEBUG nova.compute.manager [-] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2046.681063] env[63024]: DEBUG nova.network.neutron [-] [instance: fe6847e2-a742-4338-983f-698c13aaefde] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2046.800747] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3faa85ed-242c-42ff-afb4-990743c934db tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "73db94b8-cfa8-4457-bccb-d4b780edbd93" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.046s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2046.905509] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2046.924215] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2046.924479] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2046.924640] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2046.924823] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2046.924972] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2046.925139] 
env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2046.925350] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2046.925510] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2046.925679] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2046.925842] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2046.926022] env[63024]: DEBUG nova.virt.hardware [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2046.931263] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Reconfiguring VM instance instance-00000013 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2046.934153] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2eade1fb-aab3-457c-8253-bbf04cf7cbdf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2046.955022] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2046.955022] env[63024]: value = "task-1951629" [ 2046.955022] env[63024]: _type = "Task" [ 2046.955022] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2046.963472] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951629, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.113621] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0204dc4-644c-4c79-a9a1-501360c71d5e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.123768] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c7618cd-361f-445d-901e-771eb1e69825 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.127510] env[63024]: DEBUG nova.network.neutron [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updating instance_info_cache with network_info: [{"id": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "address": "fa:16:3e:d8:3a:2e", "network": {"id": "feb2323b-f3cf-42d6-a22b-81d1c94fce9d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-75667819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f87eadd82394447910efa7b71814e97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05bc00c8-44", "ovs_interfaceid": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2047.157669] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1cfa7a-6c85-48b0-a94b-c8d3939bc3aa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.166751] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad04225-0bf1-43e7-8d92-c284a0de1b0d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.181843] env[63024]: DEBUG nova.compute.provider_tree [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2047.472856] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951629, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.494272] env[63024]: DEBUG nova.compute.manager [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Received event network-vif-plugged-05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2047.494272] env[63024]: DEBUG oslo_concurrency.lockutils [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] Acquiring lock "92d1f96e-bbe7-4654-9d3a-47ba40057157-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2047.494272] env[63024]: DEBUG oslo_concurrency.lockutils [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2047.494272] env[63024]: DEBUG oslo_concurrency.lockutils [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2047.494272] env[63024]: DEBUG nova.compute.manager [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] No waiting events found dispatching network-vif-plugged-05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2047.495026] env[63024]: WARNING nova.compute.manager [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Received unexpected event network-vif-plugged-05bc00c8-444d-425a-8c1e-0d34f269c7e8 for instance with vm_state shelved_offloaded and task_state spawning. [ 2047.495026] env[63024]: DEBUG nova.compute.manager [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Received event network-changed-05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2047.495201] env[63024]: DEBUG nova.compute.manager [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Refreshing instance network info cache due to event network-changed-05bc00c8-444d-425a-8c1e-0d34f269c7e8. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2047.495356] env[63024]: DEBUG oslo_concurrency.lockutils [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] Acquiring lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2047.631464] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Releasing lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2047.635039] env[63024]: DEBUG oslo_concurrency.lockutils [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] Acquired lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2047.635324] env[63024]: DEBUG nova.network.neutron [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Refreshing network info cache for port 05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2047.667587] env[63024]: DEBUG nova.virt.hardware [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='704181f9ae34e47282d365beadc4c703',container_format='bare',created_at=2024-12-22T11:13:30Z,direct_url=,disk_format='vmdk',id=07a796d4-2723-41a7-afac-14eecabc2bc1,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-2011017374-shelved',owner='6f87eadd82394447910efa7b71814e97',properties=ImageMetaProps,protected=,size=31592448,status='active',tags=,updated_at=2024-12-22T11:13:45Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2047.667843] env[63024]: DEBUG nova.virt.hardware [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2047.668008] env[63024]: DEBUG nova.virt.hardware [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2047.668309] env[63024]: DEBUG nova.virt.hardware [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2047.668391] env[63024]: DEBUG 
nova.virt.hardware [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2047.668492] env[63024]: DEBUG nova.virt.hardware [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2047.668702] env[63024]: DEBUG nova.virt.hardware [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2047.668862] env[63024]: DEBUG nova.virt.hardware [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2047.669301] env[63024]: DEBUG nova.virt.hardware [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2047.669482] env[63024]: DEBUG nova.virt.hardware [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2047.669813] env[63024]: DEBUG nova.virt.hardware [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2047.671019] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d80fb952-8b8a-4678-bb13-42a8ba797320 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.680549] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383437a3-10f4-4c35-9583-bd75893c5de2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.686415] env[63024]: DEBUG nova.scheduler.client.report [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': 
{'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2047.704450] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d8:3a:2e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05bc00c8-444d-425a-8c1e-0d34f269c7e8', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2047.711900] env[63024]: DEBUG oslo.service.loopingcall [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2047.712987] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2047.712987] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cdd084e9-ccf6-4d16-b194-7dd727196dfb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2047.733884] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2047.733884] env[63024]: value = "task-1951630" [ 2047.733884] env[63024]: _type = "Task" [ 2047.733884] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2047.741786] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951630, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2047.885721] env[63024]: DEBUG nova.network.neutron [-] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2047.968065] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951629, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.205676] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.935s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2048.208324] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 31.574s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2048.208563] env[63024]: DEBUG nova.objects.instance [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lazy-loading 'resources' on Instance uuid 50c72c53-ff72-42e6-afdc-14e0ac64f490 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2048.239606] env[63024]: INFO nova.scheduler.client.report [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Deleted allocations for instance a694e49c-37c5-483f-b1d8-5426f6a52b73 [ 2048.247120] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951630, 'name': CreateVM_Task, 'duration_secs': 0.336795} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.249851] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2048.250557] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2048.250732] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired lock "[datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2048.251110] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2048.251542] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9cd726f0-2cb1-4a5a-8da8-987a5e8bfbcb {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.257917] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 2048.257917] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52454881-4e83-d131-2d10-7051d5b5352e" [ 2048.257917] env[63024]: _type = "Task" [ 2048.257917] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.266776] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52454881-4e83-d131-2d10-7051d5b5352e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.390996] env[63024]: INFO nova.compute.manager [-] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Took 1.71 seconds to deallocate network for instance. [ 2048.469079] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951629, 'name': ReconfigVM_Task, 'duration_secs': 1.191112} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2048.469465] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Reconfigured VM instance instance-00000013 to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2048.470402] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d986366-056d-49ff-a926-37255c3807a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.493191] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2/9716d592-32d1-4f1d-b42b-1c8a7d81d2f2.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2048.494513] env[63024]: DEBUG nova.network.neutron [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updated VIF entry in instance network info cache for port 05bc00c8-444d-425a-8c1e-0d34f269c7e8. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2048.495177] env[63024]: DEBUG nova.network.neutron [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updating instance_info_cache with network_info: [{"id": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "address": "fa:16:3e:d8:3a:2e", "network": {"id": "feb2323b-f3cf-42d6-a22b-81d1c94fce9d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-75667819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f87eadd82394447910efa7b71814e97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05bc00c8-44", "ovs_interfaceid": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2048.496640] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5f8a0cc-1537-44f7-88b8-346cc92c2f7b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.511208] env[63024]: DEBUG oslo_concurrency.lockutils [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] Releasing lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2048.511517] env[63024]: DEBUG nova.compute.manager [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Received event network-vif-deleted-a86b5113-d05e-45ac-bf54-833ea769eae5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2048.511721] env[63024]: INFO nova.compute.manager [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Neutron deleted interface a86b5113-d05e-45ac-bf54-833ea769eae5; detaching it from the instance and deleting it from the info cache [ 2048.511935] env[63024]: DEBUG nova.network.neutron [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2048.521967] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2048.521967] env[63024]: value = "task-1951631" [ 2048.521967] env[63024]: _type = 
"Task" [ 2048.521967] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.533810] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951631, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.757983] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a428efa-3cb3-4c42-9ce3-9bdf6bfab201 tempest-InstanceActionsNegativeTestJSON-1588332819 tempest-InstanceActionsNegativeTestJSON-1588332819-project-member] Lock "a694e49c-37c5-483f-b1d8-5426f6a52b73" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.123s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2048.772165] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Releasing lock "[datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2048.772457] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Processing image 07a796d4-2723-41a7-afac-14eecabc2bc1 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2048.772701] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1/07a796d4-2723-41a7-afac-14eecabc2bc1.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2048.772849] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired lock "[datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1/07a796d4-2723-41a7-afac-14eecabc2bc1.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2048.773346] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2048.774971] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3ff89c5-3621-4cc8-97bc-4246eaa2a0a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.803052] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Created directory with path 
[datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2048.803272] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2048.804030] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10b34a5f-a399-4d2b-9620-816c64aa2a66 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.811493] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 2048.811493] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e80090-08c9-b33b-d581-08cb33c75ac5" [ 2048.811493] env[63024]: _type = "Task" [ 2048.811493] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2048.819391] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e80090-08c9-b33b-d581-08cb33c75ac5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2048.896706] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2048.941106] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d74b76-b7ef-4841-aec8-d5ff8b99430d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.949119] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bdcbdee-b90c-43be-99cd-df8a0a1b4a68 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.978427] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e79aa733-b9a0-464c-b6ac-463788c996ea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.985227] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c678019a-d754-4a2f-bf8e-717a229864c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2048.997860] env[63024]: DEBUG nova.compute.provider_tree [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2049.014688] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ddb0874-9065-46b7-ad01-4ac7b27318a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.023863] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae08b36-5842-4501-9f94-48ba4d19abdf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.047562] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951631, 'name': ReconfigVM_Task, 'duration_secs': 0.283015} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2049.047887] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2/9716d592-32d1-4f1d-b42b-1c8a7d81d2f2.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2049.048197] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance '9716d592-32d1-4f1d-b42b-1c8a7d81d2f2' progress to 50 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2049.061221] env[63024]: DEBUG nova.compute.manager [req-55a26c68-4944-4e90-9d88-16b0855e61ee req-2e43a6ca-84ae-4dbe-a93e-8ff2372e6bcd service nova] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Detach interface failed, port_id=a86b5113-d05e-45ac-bf54-833ea769eae5, reason: Instance fe6847e2-a742-4338-983f-698c13aaefde could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2049.145246] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2049.145488] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2049.322920] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Preparing fetch location {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2049.323285] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Fetch image to [datastore1] OSTACK_IMG_c3eacfb4-db77-4416-b771-5186b160062c/OSTACK_IMG_c3eacfb4-db77-4416-b771-5186b160062c.vmdk {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2049.323545] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Downloading stream optimized image 07a796d4-2723-41a7-afac-14eecabc2bc1 to [datastore1] OSTACK_IMG_c3eacfb4-db77-4416-b771-5186b160062c/OSTACK_IMG_c3eacfb4-db77-4416-b771-5186b160062c.vmdk on the data store datastore1 as vApp {{(pid=63024) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2049.323797] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Downloading image file data 07a796d4-2723-41a7-afac-14eecabc2bc1 to the ESX as VM named 'OSTACK_IMG_c3eacfb4-db77-4416-b771-5186b160062c' {{(pid=63024) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2049.405415] env[63024]: DEBUG oslo_vmware.rw_handles [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2049.405415] env[63024]: value = "resgroup-9" [ 2049.405415] env[63024]: _type = "ResourcePool" [ 2049.405415] env[63024]: }. 
{{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2049.406062] env[63024]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-96ea40f5-7160-4414-b9b2-7496efb7c90a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.429101] env[63024]: DEBUG oslo_vmware.rw_handles [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lease: (returnval){ [ 2049.429101] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522db80d-4a2b-39fd-767f-6f775c89b54b" [ 2049.429101] env[63024]: _type = "HttpNfcLease" [ 2049.429101] env[63024]: } obtained for vApp import into resource pool (val){ [ 2049.429101] env[63024]: value = "resgroup-9" [ 2049.429101] env[63024]: _type = "ResourcePool" [ 2049.429101] env[63024]: }. {{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2049.429370] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the lease: (returnval){ [ 2049.429370] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522db80d-4a2b-39fd-767f-6f775c89b54b" [ 2049.429370] env[63024]: _type = "HttpNfcLease" [ 2049.429370] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2049.438053] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2049.438053] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522db80d-4a2b-39fd-767f-6f775c89b54b" [ 2049.438053] env[63024]: _type = "HttpNfcLease" [ 2049.438053] env[63024]: } is initializing. 
{{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2049.501270] env[63024]: DEBUG nova.scheduler.client.report [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2049.566177] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038fc441-afae-4665-b0e7-f6730d950f3b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.586733] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31add5a4-e75a-4ac1-95f1-73313362f583 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.610859] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance '9716d592-32d1-4f1d-b42b-1c8a7d81d2f2' progress to 67 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2049.648599] env[63024]: DEBUG nova.compute.manager [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2049.940880] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2049.940880] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522db80d-4a2b-39fd-767f-6f775c89b54b" [ 2049.940880] env[63024]: _type = "HttpNfcLease" [ 2049.940880] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2049.941392] env[63024]: DEBUG oslo_vmware.rw_handles [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2049.941392] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522db80d-4a2b-39fd-767f-6f775c89b54b" [ 2049.941392] env[63024]: _type = "HttpNfcLease" [ 2049.941392] env[63024]: }. 
{{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2049.942485] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94a408a2-4125-4546-9e7f-a21b73abfa19 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2049.952012] env[63024]: DEBUG oslo_vmware.rw_handles [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235ce15-6df2-2d5c-fa18-704aba9ea356/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2049.952307] env[63024]: DEBUG oslo_vmware.rw_handles [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Creating HTTP connection to write to file with size = 31592448 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235ce15-6df2-2d5c-fa18-704aba9ea356/disk-0.vmdk. {{(pid=63024) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2050.015433] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.807s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2050.019440] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.847s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2050.021856] env[63024]: INFO nova.compute.claims [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2050.027558] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-f46e227e-ee1e-4c9b-8e27-04c4e10b427d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2050.045771] env[63024]: INFO nova.scheduler.client.report [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted allocations for instance 50c72c53-ff72-42e6-afdc-14e0ac64f490 [ 2050.167917] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2050.207158] env[63024]: DEBUG nova.network.neutron [None 
req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Port 6e0e9732-b318-4b20-ad72-8c2bc07eaf34 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2050.561557] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f8fc803-bb4f-4617-933f-7972cce55655 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "50c72c53-ff72-42e6-afdc-14e0ac64f490" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.652s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.225654] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.226026] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.226195] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2051.276133] env[63024]: DEBUG oslo_vmware.rw_handles [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Completed reading data from the image iterator. {{(pid=63024) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2051.276358] env[63024]: DEBUG oslo_vmware.rw_handles [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235ce15-6df2-2d5c-fa18-704aba9ea356/disk-0.vmdk. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2051.277300] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb58255-8838-4115-871e-56f99b6b6c56 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.288489] env[63024]: DEBUG oslo_vmware.rw_handles [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235ce15-6df2-2d5c-fa18-704aba9ea356/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2051.288676] env[63024]: DEBUG oslo_vmware.rw_handles [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235ce15-6df2-2d5c-fa18-704aba9ea356/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2051.289061] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-401baacd-7267-44fa-85db-62177550e44e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.308145] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dbb5fbd-f6f7-497d-bb20-9c5d2795b50b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.316839] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa200cd6-c59c-49de-9259-7999663d5001 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.349013] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0a620af-c4ca-45de-86af-cd2a2062b514 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.359749] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22b6a4fe-13d3-4c44-87b3-22568789a7da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.374544] env[63024]: DEBUG nova.compute.provider_tree [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2051.653909] env[63024]: DEBUG oslo_vmware.rw_handles [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5235ce15-6df2-2d5c-fa18-704aba9ea356/disk-0.vmdk. 
{{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2051.654051] env[63024]: INFO nova.virt.vmwareapi.images [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Downloaded image file data 07a796d4-2723-41a7-afac-14eecabc2bc1 [ 2051.654976] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce2f4820-2139-47e8-a5d0-b9075842471a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.670880] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a8547af1-f877-40ff-accf-bb9ffbd811ee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.707709] env[63024]: INFO nova.virt.vmwareapi.images [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] The imported VM was unregistered [ 2051.710129] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Caching image {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2051.710371] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Creating directory with path [datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1 {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2051.710712] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1162cbd6-3e4d-42b1-8106-8814388af240 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.722743] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Created directory with path [datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1 {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2051.722955] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_c3eacfb4-db77-4416-b771-5186b160062c/OSTACK_IMG_c3eacfb4-db77-4416-b771-5186b160062c.vmdk to [datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1/07a796d4-2723-41a7-afac-14eecabc2bc1.vmdk. 
{{(pid=63024) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2051.723231] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-e0b12506-0903-4da6-99f8-a2a612f2679c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2051.730738] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 2051.730738] env[63024]: value = "task-1951634" [ 2051.730738] env[63024]: _type = "Task" [ 2051.730738] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2051.740733] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951634, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2051.856577] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2051.856828] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2051.877617] env[63024]: DEBUG nova.scheduler.client.report [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2052.248593] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951634, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.290279] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2052.290474] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2052.290756] env[63024]: DEBUG nova.network.neutron [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2052.359082] env[63024]: DEBUG nova.compute.manager [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2052.385024] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.364s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2052.385024] env[63024]: DEBUG nova.compute.manager [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2052.387189] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.606s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2052.387539] env[63024]: DEBUG nova.objects.instance [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lazy-loading 'resources' on Instance uuid da1f5cbc-47bf-4ee4-837a-b328de170489 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2052.742730] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951634, 'name': MoveVirtualDisk_Task} progress is 40%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2052.884769] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2052.893580] env[63024]: DEBUG nova.compute.utils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2052.897777] env[63024]: DEBUG nova.compute.manager [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2052.898013] env[63024]: DEBUG nova.network.neutron [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2052.980925] env[63024]: DEBUG nova.policy [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fc84a6eed984429b26a693ce7b0876e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9521048e807c4ca2a6d2e74a72b829a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2053.175756] env[63024]: DEBUG nova.network.neutron [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance_info_cache with network_info: [{"id": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "address": "fa:16:3e:2b:cc:65", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", 
"segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e0e9732-b3", "ovs_interfaceid": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2053.185345] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e36b7c2b-b0bc-4c87-b2c7-f240c6616ef9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.196021] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86f03f77-f98e-4d76-8552-b6ba37fed583 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.232707] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3fc8cd-9558-4713-94a9-f5ecdce90082 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.248257] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951634, 'name': MoveVirtualDisk_Task} progress is 63%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.250103] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d26528-08da-42c1-a74a-ba3dc45e24ee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2053.268138] env[63024]: DEBUG nova.compute.provider_tree [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2053.401543] env[63024]: DEBUG nova.compute.manager [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2053.479896] env[63024]: DEBUG nova.network.neutron [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Successfully created port: 340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2053.678253] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2053.749405] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951634, 'name': MoveVirtualDisk_Task} progress is 85%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2053.771247] env[63024]: DEBUG nova.scheduler.client.report [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2054.207052] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f79011-5866-4539-b1b9-5bfc5d15cbda {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.227447] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67dea779-779d-4bb2-8e11-37006a96c5d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.234958] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance '9716d592-32d1-4f1d-b42b-1c8a7d81d2f2' progress to 83 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2054.246609] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951634, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.387222} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.246902] env[63024]: INFO nova.virt.vmwareapi.ds_util [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_c3eacfb4-db77-4416-b771-5186b160062c/OSTACK_IMG_c3eacfb4-db77-4416-b771-5186b160062c.vmdk to [datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1/07a796d4-2723-41a7-afac-14eecabc2bc1.vmdk. [ 2054.247157] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Cleaning up location [datastore1] OSTACK_IMG_c3eacfb4-db77-4416-b771-5186b160062c {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2054.247330] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_c3eacfb4-db77-4416-b771-5186b160062c {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2054.247569] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b88fbddb-3af4-46c3-b9d0-728a5e9523d2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.253584] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 2054.253584] env[63024]: value = "task-1951635" [ 2054.253584] env[63024]: _type = "Task" [ 2054.253584] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.262466] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951635, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.276467] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.889s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2054.279561] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.628s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2054.279789] env[63024]: DEBUG nova.objects.instance [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lazy-loading 'resources' on Instance uuid 31a693b6-293a-4f01-9baf-a9e7e8d453d4 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2054.294997] env[63024]: INFO nova.scheduler.client.report [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Deleted allocations for instance da1f5cbc-47bf-4ee4-837a-b328de170489 [ 2054.412815] env[63024]: DEBUG nova.compute.manager [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2054.438300] env[63024]: DEBUG nova.virt.hardware [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2054.438569] env[63024]: DEBUG nova.virt.hardware [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2054.438729] env[63024]: DEBUG nova.virt.hardware [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2054.438909] env[63024]: DEBUG nova.virt.hardware [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2054.439077] env[63024]: DEBUG nova.virt.hardware [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2054.439232] env[63024]: DEBUG nova.virt.hardware [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2054.439439] env[63024]: DEBUG nova.virt.hardware [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2054.439594] env[63024]: DEBUG nova.virt.hardware [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2054.439761] 
env[63024]: DEBUG nova.virt.hardware [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2054.439919] env[63024]: DEBUG nova.virt.hardware [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2054.440106] env[63024]: DEBUG nova.virt.hardware [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2054.440988] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-036f95f8-80a3-4e3f-8e11-a04546bc3fa0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.449419] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b356149e-98a7-488b-8833-446bd1a1bb18 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.744493] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2054.744834] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-798df547-9059-4805-81b2-fffc31b82de1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.754343] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2054.754343] env[63024]: value = "task-1951636" [ 2054.754343] env[63024]: _type = "Task" [ 2054.754343] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.765266] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951636, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.767814] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951635, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.037146} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2054.768069] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2054.768238] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Releasing lock "[datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1/07a796d4-2723-41a7-afac-14eecabc2bc1.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2054.768483] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1/07a796d4-2723-41a7-afac-14eecabc2bc1.vmdk to [datastore1] 92d1f96e-bbe7-4654-9d3a-47ba40057157/92d1f96e-bbe7-4654-9d3a-47ba40057157.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2054.768715] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3be243d0-b21a-48f1-b4eb-4431a12d993a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2054.775106] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 2054.775106] env[63024]: value = "task-1951637" [ 2054.775106] env[63024]: _type = "Task" [ 2054.775106] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2054.786145] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951637, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2054.806311] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f409249d-1f3e-493a-a133-cf12e9494c5b tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "da1f5cbc-47bf-4ee4-837a-b328de170489" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.829s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2055.063734] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c2de44-e2f7-4244-9832-92e9afe85aaf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.074428] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-304567a7-fadd-47a9-971c-3692068669dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.110665] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a024e707-274d-4150-a650-98a17b907569 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.121593] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71da7505-ea98-467d-9356-60ff79c1abca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2055.138785] env[63024]: DEBUG nova.compute.provider_tree [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2055.271095] env[63024]: DEBUG oslo_vmware.api [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951636, 'name': PowerOnVM_Task, 'duration_secs': 0.433511} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2055.271449] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2055.271635] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d94a101a-ca2e-45e3-b0db-567ce72ab387 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance '9716d592-32d1-4f1d-b42b-1c8a7d81d2f2' progress to 100 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2055.288528] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951637, 'name': CopyVirtualDisk_Task} progress is 18%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2055.402989] env[63024]: DEBUG nova.compute.manager [req-fd7e5aee-480b-4e65-8c96-bb89eb203b92 req-a165850a-23cb-415d-878d-989fb34adbbe service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Received event network-vif-plugged-340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2055.403323] env[63024]: DEBUG oslo_concurrency.lockutils [req-fd7e5aee-480b-4e65-8c96-bb89eb203b92 req-a165850a-23cb-415d-878d-989fb34adbbe service nova] Acquiring lock "8826c266-659c-46ad-bb02-aefdffab8699-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2055.403503] env[63024]: DEBUG oslo_concurrency.lockutils [req-fd7e5aee-480b-4e65-8c96-bb89eb203b92 req-a165850a-23cb-415d-878d-989fb34adbbe service nova] Lock "8826c266-659c-46ad-bb02-aefdffab8699-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2055.403630] env[63024]: DEBUG oslo_concurrency.lockutils [req-fd7e5aee-480b-4e65-8c96-bb89eb203b92 req-a165850a-23cb-415d-878d-989fb34adbbe service nova] Lock "8826c266-659c-46ad-bb02-aefdffab8699-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2055.403835] env[63024]: DEBUG nova.compute.manager [req-fd7e5aee-480b-4e65-8c96-bb89eb203b92 req-a165850a-23cb-415d-878d-989fb34adbbe service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] No waiting events found dispatching network-vif-plugged-340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2055.404078] env[63024]: WARNING nova.compute.manager [req-fd7e5aee-480b-4e65-8c96-bb89eb203b92 req-a165850a-23cb-415d-878d-989fb34adbbe service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Received unexpected event network-vif-plugged-340baee8-fd68-482a-94ce-82df41470c62 for instance with vm_state building and task_state spawning. 
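The repeated "Acquiring lock ... / Lock ... acquired ... waited / Lock ... released ... held" triplets in the records above (the instance build lock, the "-events" lock, the "refresh_cache-<uuid>" and "compute_resources" locks) all come from oslo.concurrency's lock helpers. A minimal sketch of that pattern, for orientation only; the lock names are copied from this log, while the guarded work is illustrative and not taken from it:

```python
from oslo_concurrency import lockutils

# Decorator form: the wrapper around the decorated function is what logs the
# "acquired by ... :: waited" / "released by ... :: held" DEBUG lines seen above.
@lockutils.synchronized('8826c266-659c-46ad-bb02-aefdffab8699-events')
def pop_event():
    # ... look up and remove a pending instance event (illustrative) ...
    pass

# Context-manager form, used for ad-hoc critical sections such as the
# "refresh_cache-<instance uuid>" lock in the records above.
def refresh_cache(instance_uuid):
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        # ... rebuild the instance network info cache (illustrative) ...
        pass
```

Both forms serialize concurrent greenthreads inside one nova-compute process; the "waited N.NNNs" / "held N.NNNs" figures in the log are the time spent blocked on, and holding, these locks.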
[ 2055.595072] env[63024]: DEBUG nova.network.neutron [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Successfully updated port: 340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2055.642109] env[63024]: DEBUG nova.scheduler.client.report [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2055.807422] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951637, 'name': CopyVirtualDisk_Task} progress is 40%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.098081] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2056.098081] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2056.098081] env[63024]: DEBUG nova.network.neutron [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2056.147635] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.868s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2056.150208] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.836s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2056.151934] 
env[63024]: INFO nova.compute.claims [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2056.168254] env[63024]: INFO nova.scheduler.client.report [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Deleted allocations for instance 31a693b6-293a-4f01-9baf-a9e7e8d453d4 [ 2056.294549] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951637, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.435360] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "2aa624cb-b36a-43c9-8407-37383f196563" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2056.435613] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "2aa624cb-b36a-43c9-8407-37383f196563" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2056.647993] env[63024]: DEBUG nova.network.neutron [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2056.676230] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d047c0-30f0-4b26-826b-021125ebcfb4 tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "31a693b6-293a-4f01-9baf-a9e7e8d453d4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.680s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2056.789767] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951637, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2056.873219] env[63024]: DEBUG nova.network.neutron [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updating instance_info_cache with network_info: [{"id": "340baee8-fd68-482a-94ce-82df41470c62", "address": "fa:16:3e:0c:e1:92", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap340baee8-fd", "ovs_interfaceid": "340baee8-fd68-482a-94ce-82df41470c62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2056.939058] env[63024]: DEBUG nova.compute.manager [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2057.293254] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951637, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.380122} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.293550] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/07a796d4-2723-41a7-afac-14eecabc2bc1/07a796d4-2723-41a7-afac-14eecabc2bc1.vmdk to [datastore1] 92d1f96e-bbe7-4654-9d3a-47ba40057157/92d1f96e-bbe7-4654-9d3a-47ba40057157.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2057.294457] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99d483ca-f1b3-451b-aa87-15870c553eea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.324593] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] 92d1f96e-bbe7-4654-9d3a-47ba40057157/92d1f96e-bbe7-4654-9d3a-47ba40057157.vmdk or device None with type streamOptimized {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2057.331959] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd918380-d3ea-4faf-98b8-92436446e2e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.357832] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 2057.357832] env[63024]: value = "task-1951638" [ 2057.357832] env[63024]: _type = "Task" [ 2057.357832] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.371634] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951638, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.376455] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2057.376769] env[63024]: DEBUG nova.compute.manager [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Instance network_info: |[{"id": "340baee8-fd68-482a-94ce-82df41470c62", "address": "fa:16:3e:0c:e1:92", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap340baee8-fd", "ovs_interfaceid": "340baee8-fd68-482a-94ce-82df41470c62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2057.377407] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:e1:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e445fb59-822c-4d7d-943b-c8e3bbaca62e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '340baee8-fd68-482a-94ce-82df41470c62', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2057.385828] env[63024]: DEBUG oslo.service.loopingcall [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2057.386625] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2057.386879] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8e1c2aa4-6a63-4b18-853c-4ca9919b91c3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.411324] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2057.411324] env[63024]: value = "task-1951639" [ 2057.411324] env[63024]: _type = "Task" [ 2057.411324] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.422853] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951639, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.462245] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.485954] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97129298-01be-4286-92ed-e08d0dde2a9b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.493977] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b84deef-1460-439c-b13a-24bc8ead904c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.526647] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47caaa0a-4e89-4492-ac58-c5e2f442e40f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.530329] env[63024]: DEBUG nova.compute.manager [req-4dd8b8f0-25bd-4f66-a4e2-5992a5e681dc req-90420734-1ce3-490f-ae62-f2a007ff22bc service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Received event network-changed-340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2057.530724] env[63024]: DEBUG nova.compute.manager [req-4dd8b8f0-25bd-4f66-a4e2-5992a5e681dc req-90420734-1ce3-490f-ae62-f2a007ff22bc service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Refreshing instance network info cache due to event network-changed-340baee8-fd68-482a-94ce-82df41470c62. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2057.530805] env[63024]: DEBUG oslo_concurrency.lockutils [req-4dd8b8f0-25bd-4f66-a4e2-5992a5e681dc req-90420734-1ce3-490f-ae62-f2a007ff22bc service nova] Acquiring lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2057.531060] env[63024]: DEBUG oslo_concurrency.lockutils [req-4dd8b8f0-25bd-4f66-a4e2-5992a5e681dc req-90420734-1ce3-490f-ae62-f2a007ff22bc service nova] Acquired lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.531252] env[63024]: DEBUG nova.network.neutron [req-4dd8b8f0-25bd-4f66-a4e2-5992a5e681dc req-90420734-1ce3-490f-ae62-f2a007ff22bc service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Refreshing network info cache for port 340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2057.539290] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f17e18-cbcf-45b1-bd8b-69ace6cddd76 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.544211] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.544444] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.544626] env[63024]: DEBUG nova.compute.manager [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Going to confirm migration 6 {{(pid=63024) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5113}} [ 2057.557186] env[63024]: DEBUG nova.compute.provider_tree [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2057.850470] env[63024]: DEBUG oslo_concurrency.lockutils [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.850470] env[63024]: DEBUG oslo_concurrency.lockutils [None 
req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.850470] env[63024]: DEBUG oslo_concurrency.lockutils [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2057.850470] env[63024]: DEBUG oslo_concurrency.lockutils [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2057.850470] env[63024]: DEBUG oslo_concurrency.lockutils [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2057.852343] env[63024]: INFO nova.compute.manager [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Terminating instance [ 2057.869123] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951638, 'name': ReconfigVM_Task, 'duration_secs': 0.298583} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.869432] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Reconfigured VM instance instance-0000003e to attach disk [datastore1] 92d1f96e-bbe7-4654-9d3a-47ba40057157/92d1f96e-bbe7-4654-9d3a-47ba40057157.vmdk or device None with type streamOptimized {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2057.870192] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-191afd6f-4248-4ea3-adc9-a14107849cbc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.878707] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 2057.878707] env[63024]: value = "task-1951640" [ 2057.878707] env[63024]: _type = "Task" [ 2057.878707] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.886998] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951640, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2057.921080] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951639, 'name': CreateVM_Task, 'duration_secs': 0.424134} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2057.921255] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2057.921856] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2057.922046] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2057.922390] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2057.922685] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-999a4af3-cde3-4378-9472-9fbeb78f37a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2057.927642] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2057.927642] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528bf722-a2ad-8bcc-9a65-5648c9938aba" [ 2057.927642] env[63024]: _type = "Task" [ 2057.927642] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2057.936310] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528bf722-a2ad-8bcc-9a65-5648c9938aba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.060336] env[63024]: DEBUG nova.scheduler.client.report [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2058.090439] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2058.090653] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2058.090824] env[63024]: DEBUG nova.network.neutron [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2058.091014] env[63024]: DEBUG nova.objects.instance [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lazy-loading 'info_cache' on Instance uuid 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2058.313035] env[63024]: DEBUG nova.network.neutron [req-4dd8b8f0-25bd-4f66-a4e2-5992a5e681dc req-90420734-1ce3-490f-ae62-f2a007ff22bc service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updated VIF entry in instance network info cache for port 340baee8-fd68-482a-94ce-82df41470c62. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2058.313370] env[63024]: DEBUG nova.network.neutron [req-4dd8b8f0-25bd-4f66-a4e2-5992a5e681dc req-90420734-1ce3-490f-ae62-f2a007ff22bc service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updating instance_info_cache with network_info: [{"id": "340baee8-fd68-482a-94ce-82df41470c62", "address": "fa:16:3e:0c:e1:92", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap340baee8-fd", "ovs_interfaceid": "340baee8-fd68-482a-94ce-82df41470c62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2058.356426] env[63024]: DEBUG nova.compute.manager [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2058.356699] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2058.357584] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-597d8cdc-773c-4a30-990c-566f36ee98be {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.365391] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2058.365847] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-49e8b7a7-0e09-4a65-ac5f-13fe714de951 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.372952] env[63024]: DEBUG oslo_vmware.api [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 2058.372952] env[63024]: value = "task-1951641" [ 2058.372952] env[63024]: _type = "Task" [ 2058.372952] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.387952] env[63024]: DEBUG oslo_vmware.api [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951641, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.394101] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951640, 'name': Rename_Task, 'duration_secs': 0.136347} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.394474] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2058.394474] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e9bcc13e-e638-4075-914a-3126e6ab559a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.401174] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 2058.401174] env[63024]: value = "task-1951642" [ 2058.401174] env[63024]: _type = "Task" [ 2058.401174] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.409901] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951642, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.439194] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528bf722-a2ad-8bcc-9a65-5648c9938aba, 'name': SearchDatastore_Task, 'duration_secs': 0.009602} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.441139] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2058.441139] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2058.441139] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2058.441139] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2058.441139] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2058.441139] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-69597c89-184d-4b47-9608-6ff9363c7e6d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.453730] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2058.453730] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2058.453730] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-530710f9-961d-4837-bacf-8e5f4294a89e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.460030] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2058.460030] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52835a6c-abd2-3abd-2593-2e877367d7b6" [ 2058.460030] env[63024]: _type = "Task" [ 2058.460030] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.470415] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52835a6c-abd2-3abd-2593-2e877367d7b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.565647] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2058.566200] env[63024]: DEBUG nova.compute.manager [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2058.570083] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.003s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2058.570372] env[63024]: DEBUG nova.objects.instance [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lazy-loading 'resources' on Instance uuid 9e32eb32-6eff-4875-b4a3-adfab4647023 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2058.816593] env[63024]: DEBUG oslo_concurrency.lockutils [req-4dd8b8f0-25bd-4f66-a4e2-5992a5e681dc req-90420734-1ce3-490f-ae62-f2a007ff22bc service nova] Releasing lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2058.885798] env[63024]: DEBUG oslo_vmware.api [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951641, 'name': PowerOffVM_Task, 'duration_secs': 0.309299} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.886098] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2058.886276] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2058.886526] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8511b590-2452-4156-a135-5543e49ccf4a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.911070] env[63024]: DEBUG oslo_vmware.api [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951642, 'name': PowerOnVM_Task, 'duration_secs': 0.497298} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.911379] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2058.972270] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52835a6c-abd2-3abd-2593-2e877367d7b6, 'name': SearchDatastore_Task, 'duration_secs': 0.009754} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2058.973125] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79408466-9763-4898-b6b0-0a53b6fcb791 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2058.979077] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2058.979077] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5214e74c-d8c5-a9de-5a4a-13b1183570b1" [ 2058.979077] env[63024]: _type = "Task" [ 2058.979077] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2058.990033] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5214e74c-d8c5-a9de-5a4a-13b1183570b1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2058.992331] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2058.992555] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2058.992780] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Deleting the datastore file [datastore1] b588ea21-dea0-4ee6-8f9e-12007d0a1ce1 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2058.993219] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2fd4db95-3590-4efd-a646-819ba433a372 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.002526] env[63024]: DEBUG oslo_vmware.api [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for the task: (returnval){ [ 2059.002526] env[63024]: value = "task-1951644" [ 2059.002526] env[63024]: _type = "Task" [ 2059.002526] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.011573] env[63024]: DEBUG oslo_vmware.api [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951644, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.017968] env[63024]: DEBUG nova.compute.manager [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2059.018795] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df6adec-ee7f-4ff2-9f88-fcb17e7405ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.073897] env[63024]: DEBUG nova.compute.utils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2059.078305] env[63024]: DEBUG nova.compute.manager [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Not allocating networking since 'none' was specified. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2059.310140] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa803b25-d604-4abc-913f-ea7e993ad9d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.318317] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-871567bd-3855-4442-a6a2-e75980024243 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.325404] env[63024]: DEBUG nova.network.neutron [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance_info_cache with network_info: [{"id": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "address": "fa:16:3e:2b:cc:65", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6e0e9732-b3", "ovs_interfaceid": "6e0e9732-b318-4b20-ad72-8c2bc07eaf34", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 2059.352169] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2059.352468] env[63024]: DEBUG nova.objects.instance [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lazy-loading 'migration_context' on Instance uuid 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2059.354060] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3586ed9d-4971-4205-9070-6556771f351d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.365240] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08fa6b5-8377-4819-a4a4-609dd222d9e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.379814] env[63024]: DEBUG nova.compute.provider_tree [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2059.490367] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5214e74c-d8c5-a9de-5a4a-13b1183570b1, 'name': SearchDatastore_Task, 'duration_secs': 0.011349} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.490675] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2059.490975] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 8826c266-659c-46ad-bb02-aefdffab8699/8826c266-659c-46ad-bb02-aefdffab8699.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2059.491370] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-dae359d1-6dbb-4fcc-828a-a4d0a728b4c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.499101] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2059.499101] env[63024]: value = "task-1951645" [ 2059.499101] env[63024]: _type = "Task" [ 2059.499101] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.509479] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951645, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.514942] env[63024]: DEBUG oslo_vmware.api [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Task: {'id': task-1951644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14529} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2059.515206] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2059.515443] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2059.515565] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2059.515815] env[63024]: INFO nova.compute.manager [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Took 1.16 seconds to destroy the instance on the hypervisor. [ 2059.516093] env[63024]: DEBUG oslo.service.loopingcall [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2059.516292] env[63024]: DEBUG nova.compute.manager [-] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2059.516389] env[63024]: DEBUG nova.network.neutron [-] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2059.538902] env[63024]: DEBUG oslo_concurrency.lockutils [None req-350cd96a-5c00-4631-a50a-7545c30f2ec1 tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 50.236s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2059.579455] env[63024]: DEBUG nova.compute.manager [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2059.858911] env[63024]: DEBUG nova.objects.base [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Object Instance<9716d592-32d1-4f1d-b42b-1c8a7d81d2f2> lazy-loaded attributes: info_cache,migration_context {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2059.859983] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9bf75e-1056-4edc-b439-52d42fc518cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.887280] env[63024]: DEBUG nova.scheduler.client.report [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2059.892120] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc79a730-6f8c-4142-a2f9-34dc6d25cf3a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2059.899665] env[63024]: DEBUG oslo_vmware.api [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2059.899665] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525a00ed-77f1-b44a-4d58-5aa94d2258db" [ 2059.899665] env[63024]: _type = "Task" [ 2059.899665] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2059.913793] env[63024]: DEBUG oslo_vmware.api [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525a00ed-77f1-b44a-4d58-5aa94d2258db, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2059.988807] env[63024]: DEBUG nova.compute.manager [req-a9742a6a-6ebb-41ab-b703-ef2a59e712e2 req-eaa1b0d9-057b-421c-9222-5e3ece5ebcdc service nova] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Received event network-vif-deleted-83b7b8fb-a30a-4852-889a-ec6b339cc100 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2059.989044] env[63024]: INFO nova.compute.manager [req-a9742a6a-6ebb-41ab-b703-ef2a59e712e2 req-eaa1b0d9-057b-421c-9222-5e3ece5ebcdc service nova] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Neutron deleted interface 83b7b8fb-a30a-4852-889a-ec6b339cc100; detaching it from the instance and deleting it from the info cache [ 2059.989231] env[63024]: DEBUG nova.network.neutron [req-a9742a6a-6ebb-41ab-b703-ef2a59e712e2 req-eaa1b0d9-057b-421c-9222-5e3ece5ebcdc service nova] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2060.010829] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951645, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.393498] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.823s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2060.396869] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.442s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2060.396869] env[63024]: DEBUG nova.objects.instance [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lazy-loading 'resources' on Instance uuid 85f31573-5535-4712-b736-747c43ed74b3 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2060.406438] env[63024]: DEBUG nova.network.neutron [-] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2060.411992] env[63024]: DEBUG oslo_vmware.api [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525a00ed-77f1-b44a-4d58-5aa94d2258db, 'name': SearchDatastore_Task, 'duration_secs': 0.041153} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.412396] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2060.419331] env[63024]: INFO nova.scheduler.client.report [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted allocations for instance 9e32eb32-6eff-4875-b4a3-adfab4647023 [ 2060.493623] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0f6bb965-9c2f-4eee-9b22-068ee552a605 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.507447] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8e1388c-a3bc-46e3-9482-dc7f6416fe7e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.525064] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951645, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.766419} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2060.525379] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 8826c266-659c-46ad-bb02-aefdffab8699/8826c266-659c-46ad-bb02-aefdffab8699.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2060.525599] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2060.525864] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c01c5af5-02fc-4f02-b1be-113008897f64 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.541288] env[63024]: DEBUG nova.compute.manager [req-a9742a6a-6ebb-41ab-b703-ef2a59e712e2 req-eaa1b0d9-057b-421c-9222-5e3ece5ebcdc service nova] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Detach interface failed, port_id=83b7b8fb-a30a-4852-889a-ec6b339cc100, reason: Instance b588ea21-dea0-4ee6-8f9e-12007d0a1ce1 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2060.542989] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2060.542989] env[63024]: value = "task-1951646" [ 2060.542989] env[63024]: _type = "Task" [ 2060.542989] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.551866] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951646, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.589396] env[63024]: DEBUG nova.compute.manager [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2060.618282] env[63024]: DEBUG nova.virt.hardware [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2060.618538] env[63024]: DEBUG nova.virt.hardware [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2060.618708] env[63024]: DEBUG nova.virt.hardware [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2060.618894] env[63024]: DEBUG nova.virt.hardware [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2060.619049] env[63024]: DEBUG nova.virt.hardware [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 2060.619240] env[63024]: DEBUG nova.virt.hardware [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2060.619660] env[63024]: DEBUG nova.virt.hardware [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2060.619660] env[63024]: DEBUG nova.virt.hardware [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2060.619862] env[63024]: DEBUG nova.virt.hardware [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2060.620048] env[63024]: DEBUG nova.virt.hardware [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2060.620126] env[63024]: DEBUG nova.virt.hardware [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2060.621131] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71132dd2-59ef-4470-9a37-c24f2086ed91 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.629891] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70726cc1-988f-4af6-b4dd-d69b910638c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.644593] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Instance VIF info [] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2060.650444] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Creating folder: Project (756cc668199346909cc64367901e829c). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2060.650729] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d6b37f49-ae2e-4021-9a50-0167eb24393d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.661470] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Created folder: Project (756cc668199346909cc64367901e829c) in parent group-v401959. [ 2060.661612] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Creating folder: Instances. Parent ref: group-v402232. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2060.661836] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd656cf6-e7d1-4e7e-a747-dbfb338b2925 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.672182] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Created folder: Instances in parent group-v402232. [ 2060.672415] env[63024]: DEBUG oslo.service.loopingcall [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2060.672601] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2060.672833] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f851096-f945-41b7-8928-ba10200f8aeb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2060.691693] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2060.691693] env[63024]: value = "task-1951649" [ 2060.691693] env[63024]: _type = "Task" [ 2060.691693] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2060.700232] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951649, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2060.909108] env[63024]: INFO nova.compute.manager [-] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Took 1.39 seconds to deallocate network for instance. 
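The CreateFolder/CreateVM_Task records above follow oslo.vmware's usual task-polling pattern: the driver invokes a vSphere *_Task method through the shared VMwareAPISession and then blocks in wait_for_task(), which is what produces the "Waiting for the task" / "progress is N%" / "completed successfully" DEBUG lines in this trace. A minimal sketch of that pattern is below; the host, credentials, poll settings and vm_ref are illustrative placeholders, not values taken from this log, and the constructor arguments are approximate.

# Hedged sketch of the oslo.vmware task-polling pattern seen in the records above.
# All concrete values here are placeholders, not taken from this log.
from oslo_vmware import api as vmware_api

session = vmware_api.VMwareAPISession(
    'vcenter.example.test',    # vCenter host (placeholder)
    'administrator',           # API username (placeholder)
    'secret',                  # API password (placeholder)
    10,                        # api_retry_count: retries on transient faults
    0.5)                       # task_poll_interval: seconds between polls

vm_ref = None  # placeholder: a VirtualMachine managed-object reference,
               # normally resolved via a PropertyCollector query

# invoke_api() issues the SOAP call (the "Invoking VirtualMachine.PowerOffVM_Task"
# style records) and returns a Task reference; wait_for_task() then polls the
# task, logging progress, and returns its TaskInfo or raises on failure.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
task_info = session.wait_for_task(task)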
[ 2060.926886] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e8102a09-b524-44d8-a7fc-0e1711db2b45 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "9e32eb32-6eff-4875-b4a3-adfab4647023" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.957s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2061.055824] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951646, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069891} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.056117] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2061.056884] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b02278a-6c04-429e-86c8-123c5be7205e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.081436] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 8826c266-659c-46ad-bb02-aefdffab8699/8826c266-659c-46ad-bb02-aefdffab8699.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2061.084228] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7c735e4b-a8b8-40b6-95b9-35eceb1a0882 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.104716] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2061.104716] env[63024]: value = "task-1951650" [ 2061.104716] env[63024]: _type = "Task" [ 2061.104716] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.117566] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951650, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.160529] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df3b83b5-50d0-4279-8659-720b2bdd59ef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.169145] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1632f6fd-9504-42b2-8f56-7c5945618b05 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.207286] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5317ed2-82d2-450f-9d03-b98b5940f7a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.218562] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951649, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.220185] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148ff50b-547d-4306-8bfc-c6158f5f223c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.236407] env[63024]: DEBUG nova.compute.provider_tree [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2061.295986] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e26dfd5-d88d-40ad-8a7c-3f85a494ce73 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.304111] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6144b94a-0e0d-4f12-835a-631faafbc7fe tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Suspending the VM {{(pid=63024) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2061.304363] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-722c1624-c2a8-486a-8af8-31795cfd25a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.311913] env[63024]: DEBUG oslo_vmware.api [None req-6144b94a-0e0d-4f12-835a-631faafbc7fe tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 2061.311913] env[63024]: value = "task-1951651" [ 2061.311913] env[63024]: _type = "Task" [ 2061.311913] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.321883] env[63024]: DEBUG oslo_vmware.api [None req-6144b94a-0e0d-4f12-835a-631faafbc7fe tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951651, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.416896] env[63024]: DEBUG oslo_concurrency.lockutils [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2061.616891] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951650, 'name': ReconfigVM_Task, 'duration_secs': 0.293738} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.617294] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 8826c266-659c-46ad-bb02-aefdffab8699/8826c266-659c-46ad-bb02-aefdffab8699.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2061.618056] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6266449a-b4cc-41f1-ae62-c41fc9275268 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.626376] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2061.626376] env[63024]: value = "task-1951652" [ 2061.626376] env[63024]: _type = "Task" [ 2061.626376] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.636741] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951652, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.713828] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951649, 'name': CreateVM_Task, 'duration_secs': 0.645147} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2061.714026] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2061.714471] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2061.714638] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2061.714975] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2061.715294] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-863411ad-75ec-4dec-9c3b-3f89ad0517db {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2061.720568] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2061.720568] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52700d66-594d-b5ec-65df-b8afb3c02473" [ 2061.720568] env[63024]: _type = "Task" [ 2061.720568] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2061.729058] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52700d66-594d-b5ec-65df-b8afb3c02473, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2061.740105] env[63024]: DEBUG nova.scheduler.client.report [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2061.822867] env[63024]: DEBUG oslo_vmware.api [None req-6144b94a-0e0d-4f12-835a-631faafbc7fe tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951651, 'name': SuspendVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.136940] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951652, 'name': Rename_Task, 'duration_secs': 0.161596} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.137235] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2062.137478] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9325822b-36df-470b-b633-21f770c33858 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.143135] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "1666cff0-59bd-41a0-aa3c-d1e8fac3a49a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2062.143357] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "1666cff0-59bd-41a0-aa3c-d1e8fac3a49a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2062.145952] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2062.145952] env[63024]: value = "task-1951653" [ 2062.145952] env[63024]: _type = "Task" [ 2062.145952] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.153709] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951653, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.233556] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52700d66-594d-b5ec-65df-b8afb3c02473, 'name': SearchDatastore_Task, 'duration_secs': 0.01019} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.233862] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2062.234111] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2062.234350] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2062.234495] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2062.234672] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2062.234985] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-90358716-5d6f-4e8f-a93d-6bcb1d31f5f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.246288] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.850s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2062.250150] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 28.043s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2062.251429] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2062.251617] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2062.252851] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e210d84-69f3-47dd-a69f-2878abccdde4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.258971] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2062.258971] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527d188b-dee2-b276-6a4a-4523f786ef77" [ 2062.258971] env[63024]: _type = "Task" [ 2062.258971] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.268285] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527d188b-dee2-b276-6a4a-4523f786ef77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2062.273950] env[63024]: INFO nova.scheduler.client.report [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Deleted allocations for instance 85f31573-5535-4712-b736-747c43ed74b3 [ 2062.323990] env[63024]: DEBUG oslo_vmware.api [None req-6144b94a-0e0d-4f12-835a-631faafbc7fe tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951651, 'name': SuspendVM_Task, 'duration_secs': 0.62693} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.324263] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6144b94a-0e0d-4f12-835a-631faafbc7fe tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Suspended the VM {{(pid=63024) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2062.324407] env[63024]: DEBUG nova.compute.manager [None req-6144b94a-0e0d-4f12-835a-631faafbc7fe tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2062.325214] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00eb690d-178a-4eeb-a254-c3ebc3f60cab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.646963] env[63024]: DEBUG nova.compute.manager [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2062.659278] env[63024]: DEBUG oslo_vmware.api [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951653, 'name': PowerOnVM_Task, 'duration_secs': 0.495321} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.659768] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2062.659768] env[63024]: INFO nova.compute.manager [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Took 8.25 seconds to spawn the instance on the hypervisor. [ 2062.659910] env[63024]: DEBUG nova.compute.manager [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2062.661052] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a3b912-60ea-4d72-b843-99c62b61c5c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.771563] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527d188b-dee2-b276-6a4a-4523f786ef77, 'name': SearchDatastore_Task, 'duration_secs': 0.010791} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2062.772962] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ae035fe-4cc3-4e6b-add9-dbdac4959e3d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.781334] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2062.781334] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5244fffd-7a49-9a82-96e4-e134ee7133eb" [ 2062.781334] env[63024]: _type = "Task" [ 2062.781334] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2062.781836] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d4040e73-c55c-432c-99ce-b4d361589cb6 tempest-ServerDiskConfigTestJSON-16708605 tempest-ServerDiskConfigTestJSON-16708605-project-member] Lock "85f31573-5535-4712-b736-747c43ed74b3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.351s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2062.792138] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5244fffd-7a49-9a82-96e4-e134ee7133eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.177115] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2063.179616] env[63024]: INFO nova.compute.manager [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Took 46.02 seconds to build instance. [ 2063.265427] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Applying migration context for instance 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 as it has an incoming, in-progress migration b427e72e-174f-4ebf-b6c0-a9329bf591f0. Migration status is confirming {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 2063.266721] env[63024]: INFO nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating resource usage from migration b427e72e-174f-4ebf-b6c0-a9329bf591f0 [ 2063.287833] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e8ad74ce-7862-4574-98e7-14bc54bd5d6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.288051] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance df2933d1-32c3-48a6-8ceb-d5e3047d0b78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.288310] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance b588ea21-dea0-4ee6-8f9e-12007d0a1ce1 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2063.288526] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance ea24d375-ba88-42ca-a07e-52000ec613c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.288668] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance fe6847e2-a742-4338-983f-698c13aaefde is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 2063.288792] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 34e4db8e-e0d9-4a27-9368-c5e711b51a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.288904] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 92d1f96e-bbe7-4654-9d3a-47ba40057157 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.289022] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 3cf2726c-2551-4bbd-8032-006062cdcc39 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.289143] env[63024]: WARNING nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance df5a197c-8e35-44a0-8b9c-63dae50b77ff is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 2063.289251] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Migration b427e72e-174f-4ebf-b6c0-a9329bf591f0 is active on this compute host and has allocations in placement: {'resources': {'VCPU': 1, 'MEMORY_MB': 192, 'DISK_GB': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 2063.289357] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.289460] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 8826c266-659c-46ad-bb02-aefdffab8699 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.289564] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance c5541241-84e2-4216-b6f9-4c716f29d759 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2063.296471] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5244fffd-7a49-9a82-96e4-e134ee7133eb, 'name': SearchDatastore_Task, 'duration_secs': 0.038757} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2063.297035] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2063.297338] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c5541241-84e2-4216-b6f9-4c716f29d759/c5541241-84e2-4216-b6f9-4c716f29d759.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2063.297645] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71c50a9a-ae29-4227-8951-0bf6d8bbf8e6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.306432] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2063.306432] env[63024]: value = "task-1951654" [ 2063.306432] env[63024]: _type = "Task" [ 2063.306432] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2063.316064] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951654, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2063.688720] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3bf32eb8-4508-42e6-aed3-3b5bef7ea4cc tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "8826c266-659c-46ad-bb02-aefdffab8699" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.540s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2063.689162] env[63024]: INFO nova.compute.manager [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Resuming [ 2063.689759] env[63024]: DEBUG nova.objects.instance [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lazy-loading 'flavor' on Instance uuid 92d1f96e-bbe7-4654-9d3a-47ba40057157 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2063.798370] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 0d253199-adf8-45c0-a6bf-b11c12b08688 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2063.820206] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951654, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.301540] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 5192ad93-a4e9-4aa0-983d-186ab17360f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2064.323137] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951654, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.590596} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.323563] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c5541241-84e2-4216-b6f9-4c716f29d759/c5541241-84e2-4216-b6f9-4c716f29d759.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2064.323915] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2064.324306] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e83e4509-1b05-4575-99cb-22fd5b9ce49a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.334177] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2064.334177] env[63024]: value = "task-1951655" [ 2064.334177] env[63024]: _type = "Task" [ 2064.334177] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.344907] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951655, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.699781] env[63024]: DEBUG oslo_concurrency.lockutils [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2064.700074] env[63024]: DEBUG oslo_concurrency.lockutils [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquired lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2064.700172] env[63024]: DEBUG nova.network.neutron [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2064.807849] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 881b1f35-206e-4c3f-bf7a-d1774a9343c2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2064.847215] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951655, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063893} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.847215] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2064.847671] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7a4c96-b057-45cd-bf9c-18aa1f4d8bb5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.853165] env[63024]: DEBUG nova.compute.manager [req-30176d5a-4ac1-438d-80af-d97f1de7a7c9 req-21b03e49-dc2a-4b77-b748-a669df20985c service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Received event network-changed-041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2064.853355] env[63024]: DEBUG nova.compute.manager [req-30176d5a-4ac1-438d-80af-d97f1de7a7c9 req-21b03e49-dc2a-4b77-b748-a669df20985c service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Refreshing instance network info cache due to event network-changed-041c096f-ef1b-49ad-aadb-469b89fe4f25. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2064.853566] env[63024]: DEBUG oslo_concurrency.lockutils [req-30176d5a-4ac1-438d-80af-d97f1de7a7c9 req-21b03e49-dc2a-4b77-b748-a669df20985c service nova] Acquiring lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2064.853710] env[63024]: DEBUG oslo_concurrency.lockutils [req-30176d5a-4ac1-438d-80af-d97f1de7a7c9 req-21b03e49-dc2a-4b77-b748-a669df20985c service nova] Acquired lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2064.853899] env[63024]: DEBUG nova.network.neutron [req-30176d5a-4ac1-438d-80af-d97f1de7a7c9 req-21b03e49-dc2a-4b77-b748-a669df20985c service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Refreshing network info cache for port 041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2064.877291] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] c5541241-84e2-4216-b6f9-4c716f29d759/c5541241-84e2-4216-b6f9-4c716f29d759.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2064.878627] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-907b6c8b-9fb7-42bc-aa88-677296c32156 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.909506] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2064.909506] env[63024]: value = "task-1951656" [ 2064.909506] env[63024]: _type = "Task" [ 2064.909506] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.918929] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951656, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.311943] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 2aa624cb-b36a-43c9-8407-37383f196563 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2065.419533] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951656, 'name': ReconfigVM_Task, 'duration_secs': 0.432195} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.419812] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Reconfigured VM instance instance-00000066 to attach disk [datastore1] c5541241-84e2-4216-b6f9-4c716f29d759/c5541241-84e2-4216-b6f9-4c716f29d759.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2065.420539] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-126c92e7-8dc5-411a-b801-9da7a293b3b4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.428277] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2065.428277] env[63024]: value = "task-1951657" [ 2065.428277] env[63024]: _type = "Task" [ 2065.428277] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.437615] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951657, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.673726] env[63024]: DEBUG nova.network.neutron [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updating instance_info_cache with network_info: [{"id": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "address": "fa:16:3e:d8:3a:2e", "network": {"id": "feb2323b-f3cf-42d6-a22b-81d1c94fce9d", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-75667819-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6f87eadd82394447910efa7b71814e97", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "44ed8f45-cb8e-40e7-ac70-a7f386a7d2c2", "external-id": "nsx-vlan-transportzone-268", "segmentation_id": 268, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05bc00c8-44", "ovs_interfaceid": "05bc00c8-444d-425a-8c1e-0d34f269c7e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2065.816301] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has 
yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2065.817188] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2065.817438] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2065.823540] env[63024]: DEBUG nova.network.neutron [req-30176d5a-4ac1-438d-80af-d97f1de7a7c9 req-21b03e49-dc2a-4b77-b748-a669df20985c service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updated VIF entry in instance network info cache for port 041c096f-ef1b-49ad-aadb-469b89fe4f25. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2065.823745] env[63024]: DEBUG nova.network.neutron [req-30176d5a-4ac1-438d-80af-d97f1de7a7c9 req-21b03e49-dc2a-4b77-b748-a669df20985c service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updating instance_info_cache with network_info: [{"id": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "address": "fa:16:3e:8b:4e:6d", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041c096f-ef", "ovs_interfaceid": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2065.941728] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951657, 'name': Rename_Task, 'duration_secs': 0.15466} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2065.942012] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2065.942400] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d579e0a5-2fc1-4235-9421-3c3c2cfd857f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2065.955018] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2065.955018] env[63024]: value = "task-1951658" [ 2065.955018] env[63024]: _type = "Task" [ 2065.955018] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2065.961785] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951658, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.110603] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96553e8-85f1-4ed4-b968-800a1002c00f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.119911] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc2bd10-0315-40e0-94fe-97ad4d5efe2e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.154611] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef0acac-125a-410a-8256-c1de11d59a31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.163006] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67dade4d-8aa1-449d-86de-1c832f753ef2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.177450] env[63024]: DEBUG oslo_concurrency.lockutils [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Releasing lock "refresh_cache-92d1f96e-bbe7-4654-9d3a-47ba40057157" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.177659] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2066.179549] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04772d6b-ff34-439a-9f23-9a46684c5346 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.186189] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Resuming the VM {{(pid=63024) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2066.186438] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51e5d024-2e1a-4394-8838-4794a6c4d592 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.194088] env[63024]: DEBUG oslo_vmware.api [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 2066.194088] env[63024]: value = "task-1951659" [ 2066.194088] env[63024]: _type = "Task" [ 2066.194088] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2066.202695] env[63024]: DEBUG oslo_vmware.api [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951659, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.326696] env[63024]: DEBUG oslo_concurrency.lockutils [req-30176d5a-4ac1-438d-80af-d97f1de7a7c9 req-21b03e49-dc2a-4b77-b748-a669df20985c service nova] Releasing lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2066.462797] env[63024]: DEBUG oslo_vmware.api [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951658, 'name': PowerOnVM_Task, 'duration_secs': 0.491572} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2066.463099] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2066.463318] env[63024]: INFO nova.compute.manager [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Took 5.87 seconds to spawn the instance on the hypervisor. 
[ 2066.463502] env[63024]: DEBUG nova.compute.manager [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2066.464297] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3332c36e-59e5-4aad-99b6-918a9fa6c65d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2066.545602] env[63024]: DEBUG nova.compute.manager [req-92aced40-f5f1-4ce0-a54f-7502796f06f7 req-725bd48c-de99-4783-85a8-3c1d3572ae6f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Received event network-changed-340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2066.545826] env[63024]: DEBUG nova.compute.manager [req-92aced40-f5f1-4ce0-a54f-7502796f06f7 req-725bd48c-de99-4783-85a8-3c1d3572ae6f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Refreshing instance network info cache due to event network-changed-340baee8-fd68-482a-94ce-82df41470c62. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2066.546493] env[63024]: DEBUG oslo_concurrency.lockutils [req-92aced40-f5f1-4ce0-a54f-7502796f06f7 req-725bd48c-de99-4783-85a8-3c1d3572ae6f service nova] Acquiring lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2066.546758] env[63024]: DEBUG oslo_concurrency.lockutils [req-92aced40-f5f1-4ce0-a54f-7502796f06f7 req-725bd48c-de99-4783-85a8-3c1d3572ae6f service nova] Acquired lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2066.546758] env[63024]: DEBUG nova.network.neutron [req-92aced40-f5f1-4ce0-a54f-7502796f06f7 req-725bd48c-de99-4783-85a8-3c1d3572ae6f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Refreshing network info cache for port 340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2066.684704] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2066.710818] env[63024]: DEBUG oslo_vmware.api [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951659, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2066.888307] env[63024]: DEBUG nova.compute.manager [req-538b7647-e9b4-4977-8143-a489806350ed req-a5f83331-7173-40f1-80ec-4689777db712 service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Received event network-changed-340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2066.888608] env[63024]: DEBUG nova.compute.manager [req-538b7647-e9b4-4977-8143-a489806350ed req-a5f83331-7173-40f1-80ec-4689777db712 service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Refreshing instance network info cache due to event network-changed-340baee8-fd68-482a-94ce-82df41470c62. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2066.889165] env[63024]: DEBUG oslo_concurrency.lockutils [req-538b7647-e9b4-4977-8143-a489806350ed req-a5f83331-7173-40f1-80ec-4689777db712 service nova] Acquiring lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2066.985492] env[63024]: INFO nova.compute.manager [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Took 40.69 seconds to build instance. [ 2067.194384] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2067.194640] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.945s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.194898] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.958s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2067.196605] env[63024]: INFO nova.compute.claims [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2067.210729] env[63024]: DEBUG oslo_vmware.api [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951659, 'name': PowerOnVM_Task, 'duration_secs': 0.623051} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2067.211019] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Resumed the VM {{(pid=63024) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2067.211231] env[63024]: DEBUG nova.compute.manager [None req-074275e8-4fdc-46aa-a7b2-beabfcf3c6fd tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2067.214166] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78abd42b-8d3a-468c-8b17-257e23a96fe7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.484671] env[63024]: DEBUG nova.network.neutron [req-92aced40-f5f1-4ce0-a54f-7502796f06f7 req-725bd48c-de99-4783-85a8-3c1d3572ae6f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updated VIF entry in instance network info cache for port 340baee8-fd68-482a-94ce-82df41470c62. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2067.485061] env[63024]: DEBUG nova.network.neutron [req-92aced40-f5f1-4ce0-a54f-7502796f06f7 req-725bd48c-de99-4783-85a8-3c1d3572ae6f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updating instance_info_cache with network_info: [{"id": "340baee8-fd68-482a-94ce-82df41470c62", "address": "fa:16:3e:0c:e1:92", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap340baee8-fd", "ovs_interfaceid": "340baee8-fd68-482a-94ce-82df41470c62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2067.486565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e2619b24-e7ae-45cf-9575-479bdc4a0699 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lock "c5541241-84e2-4216-b6f9-4c716f29d759" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.200s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2067.776314] env[63024]: INFO nova.compute.manager [None req-1237f708-1207-4203-ac71-9feefaa89278 
tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Rebuilding instance [ 2067.829018] env[63024]: DEBUG nova.compute.manager [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2067.829018] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc67b05-37b9-447c-b878-7af0fab0f7bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2067.990091] env[63024]: DEBUG oslo_concurrency.lockutils [req-92aced40-f5f1-4ce0-a54f-7502796f06f7 req-725bd48c-de99-4783-85a8-3c1d3572ae6f service nova] Releasing lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2067.990091] env[63024]: DEBUG oslo_concurrency.lockutils [req-538b7647-e9b4-4977-8143-a489806350ed req-a5f83331-7173-40f1-80ec-4689777db712 service nova] Acquired lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2067.990091] env[63024]: DEBUG nova.network.neutron [req-538b7647-e9b4-4977-8143-a489806350ed req-a5f83331-7173-40f1-80ec-4689777db712 service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Refreshing network info cache for port 340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2068.268849] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "interface-34e4db8e-e0d9-4a27-9368-c5e711b51a29-b0190de6-3c0b-430e-9952-40bdf36d8b58" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2068.268849] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-34e4db8e-e0d9-4a27-9368-c5e711b51a29-b0190de6-3c0b-430e-9952-40bdf36d8b58" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2068.268991] env[63024]: DEBUG nova.objects.instance [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'flavor' on Instance uuid 34e4db8e-e0d9-4a27-9368-c5e711b51a29 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2068.480889] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12efc7ea-7f53-4af3-a11c-b09d23edf720 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.494246] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-10ba1783-1ff5-47d3-85e2-9e5fb865ec28 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.532673] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c01e830-3380-41c4-bf63-954963aac2c8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.546074] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5faf307e-a144-4d2e-9037-3262a21b9e60 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.562365] env[63024]: DEBUG nova.compute.provider_tree [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2068.841978] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2068.842335] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8738900c-3fe1-4463-bfe6-fc7a96afbf71 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2068.847403] env[63024]: DEBUG nova.network.neutron [req-538b7647-e9b4-4977-8143-a489806350ed req-a5f83331-7173-40f1-80ec-4689777db712 service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updated VIF entry in instance network info cache for port 340baee8-fd68-482a-94ce-82df41470c62. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2068.851096] env[63024]: DEBUG nova.network.neutron [req-538b7647-e9b4-4977-8143-a489806350ed req-a5f83331-7173-40f1-80ec-4689777db712 service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updating instance_info_cache with network_info: [{"id": "340baee8-fd68-482a-94ce-82df41470c62", "address": "fa:16:3e:0c:e1:92", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap340baee8-fd", "ovs_interfaceid": "340baee8-fd68-482a-94ce-82df41470c62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2068.853901] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2068.853901] env[63024]: value = "task-1951660" [ 2068.853901] env[63024]: _type = "Task" [ 2068.853901] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2068.862638] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951660, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.069188] env[63024]: DEBUG nova.scheduler.client.report [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2069.138312] env[63024]: DEBUG nova.compute.manager [req-5e10d44c-d806-480d-a195-86cd865de93a req-771bc9ea-1811-498a-a397-3a639f142843 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Received event network-changed-041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2069.138516] env[63024]: DEBUG nova.compute.manager [req-5e10d44c-d806-480d-a195-86cd865de93a req-771bc9ea-1811-498a-a397-3a639f142843 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Refreshing instance network info cache due to event network-changed-041c096f-ef1b-49ad-aadb-469b89fe4f25. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2069.139422] env[63024]: DEBUG oslo_concurrency.lockutils [req-5e10d44c-d806-480d-a195-86cd865de93a req-771bc9ea-1811-498a-a397-3a639f142843 service nova] Acquiring lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2069.139422] env[63024]: DEBUG oslo_concurrency.lockutils [req-5e10d44c-d806-480d-a195-86cd865de93a req-771bc9ea-1811-498a-a397-3a639f142843 service nova] Acquired lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2069.139941] env[63024]: DEBUG nova.network.neutron [req-5e10d44c-d806-480d-a195-86cd865de93a req-771bc9ea-1811-498a-a397-3a639f142843 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Refreshing network info cache for port 041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2069.159488] env[63024]: DEBUG nova.objects.instance [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'pci_requests' on Instance uuid 34e4db8e-e0d9-4a27-9368-c5e711b51a29 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2069.352852] env[63024]: DEBUG oslo_concurrency.lockutils [req-538b7647-e9b4-4977-8143-a489806350ed req-a5f83331-7173-40f1-80ec-4689777db712 service nova] Releasing lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2069.364686] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951660, 'name': PowerOffVM_Task, 
'duration_secs': 0.167675} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.364965] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2069.365230] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2069.366171] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2236bfac-6e03-4108-9d3c-5f850ac73a00 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.373132] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2069.373367] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d2e08add-a686-4f81-9c55-8fd789d930f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.404696] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2069.404928] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2069.405133] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Deleting the datastore file [datastore1] c5541241-84e2-4216-b6f9-4c716f29d759 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2069.405403] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e888b4a-784c-40c6-ae8c-ee8dac39f6ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2069.415160] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2069.415160] env[63024]: value = "task-1951662" [ 2069.415160] env[63024]: _type = "Task" [ 2069.415160] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2069.423199] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951662, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2069.575219] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.380s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.575967] env[63024]: DEBUG nova.compute.manager [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2069.578551] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 22.673s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.579532] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.581784] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.685s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.582375] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2069.584437] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.417s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2069.586729] env[63024]: INFO nova.compute.claims [None req-10553f6f-5644-410c-af03-48483c8a0473 
tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2069.613020] env[63024]: INFO nova.scheduler.client.report [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Deleted allocations for instance df5a197c-8e35-44a0-8b9c-63dae50b77ff [ 2069.644830] env[63024]: INFO nova.scheduler.client.report [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleted allocations for instance fe6847e2-a742-4338-983f-698c13aaefde [ 2069.662690] env[63024]: DEBUG nova.objects.base [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Object Instance<34e4db8e-e0d9-4a27-9368-c5e711b51a29> lazy-loaded attributes: flavor,pci_requests {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2069.662924] env[63024]: DEBUG nova.network.neutron [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2069.789873] env[63024]: DEBUG nova.policy [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fc84a6eed984429b26a693ce7b0876e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9521048e807c4ca2a6d2e74a72b829a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2069.926402] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951662, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.217399} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2069.926715] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2069.926913] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2069.927109] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2070.091151] env[63024]: DEBUG nova.compute.utils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2070.096313] env[63024]: DEBUG nova.compute.manager [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2070.096516] env[63024]: DEBUG nova.network.neutron [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2070.102175] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Acquiring lock "e4d6e79b-f110-44c2-8201-926b57eeb68d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2070.102443] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Lock "e4d6e79b-f110-44c2-8201-926b57eeb68d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2070.123307] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5721a3c5-619f-4791-b8af-3040ef3a9475 tempest-InstanceActionsV221TestJSON-1344727511 tempest-InstanceActionsV221TestJSON-1344727511-project-member] Lock "df5a197c-8e35-44a0-8b9c-63dae50b77ff" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.839s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.138949] env[63024]: DEBUG nova.network.neutron [req-5e10d44c-d806-480d-a195-86cd865de93a req-771bc9ea-1811-498a-a397-3a639f142843 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updated VIF entry in instance network info cache for port 041c096f-ef1b-49ad-aadb-469b89fe4f25. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2070.139322] env[63024]: DEBUG nova.network.neutron [req-5e10d44c-d806-480d-a195-86cd865de93a req-771bc9ea-1811-498a-a397-3a639f142843 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updating instance_info_cache with network_info: [{"id": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "address": "fa:16:3e:8b:4e:6d", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041c096f-ef", "ovs_interfaceid": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2070.155369] env[63024]: DEBUG oslo_concurrency.lockutils [None req-b2a1a45a-d2e8-4fb5-81fb-4155d202aa5e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "fe6847e2-a742-4338-983f-698c13aaefde" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.252s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2070.216227] env[63024]: DEBUG nova.policy [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2cc094a0a6b444ab1880fcfb1de4e8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bbfeec6d47746328f185acd132e0d5a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2070.603863] env[63024]: DEBUG nova.compute.manager [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Start building block 
device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2070.610635] env[63024]: DEBUG nova.compute.manager [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2070.643626] env[63024]: DEBUG oslo_concurrency.lockutils [req-5e10d44c-d806-480d-a195-86cd865de93a req-771bc9ea-1811-498a-a397-3a639f142843 service nova] Releasing lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2070.662102] env[63024]: DEBUG nova.network.neutron [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Successfully created port: 5d2fbce5-45d5-4e27-bc41-6f77e528f245 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2070.835816] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e569c3-f872-4623-b2c3-46c1406607f4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.848016] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe331d12-d50f-4bcc-b36e-38d5d14f8b04 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.876986] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21ac9600-d8a3-4012-9071-ff63d281fb03 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.884778] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c24b5e49-7a83-486a-9401-7e55ed8ed690 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.899707] env[63024]: DEBUG nova.compute.provider_tree [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2070.962612] env[63024]: DEBUG nova.virt.hardware [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2070.962950] env[63024]: DEBUG nova.virt.hardware [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2070.963182] env[63024]: DEBUG nova.virt.hardware [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2070.963419] env[63024]: DEBUG nova.virt.hardware [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2070.963613] env[63024]: DEBUG nova.virt.hardware [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2070.963837] env[63024]: DEBUG nova.virt.hardware [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2070.964163] env[63024]: DEBUG nova.virt.hardware [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2070.964388] env[63024]: DEBUG nova.virt.hardware [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2070.964610] env[63024]: DEBUG nova.virt.hardware [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2070.964872] env[63024]: DEBUG nova.virt.hardware [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2070.965157] env[63024]: DEBUG nova.virt.hardware [None req-1237f708-1207-4203-ac71-9feefaa89278 
tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2070.966061] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29326a84-762a-4877-92bb-855ea528accf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.975271] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2307c24-f93c-41dd-ad50-b3565d252c3c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2070.989623] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Instance VIF info [] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2070.995224] env[63024]: DEBUG oslo.service.loopingcall [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2070.995538] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2070.995879] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b4dc733-3ba8-44d3-8058-2f0eb91ec991 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.013072] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2071.013072] env[63024]: value = "task-1951663" [ 2071.013072] env[63024]: _type = "Task" [ 2071.013072] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.022130] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951663, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.131841] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.403683] env[63024]: DEBUG nova.scheduler.client.report [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2071.525717] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951663, 'name': CreateVM_Task, 'duration_secs': 0.313719} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2071.525842] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2071.526358] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2071.526526] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2071.526842] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2071.527118] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fb80b41-8629-4ce1-bba8-add0ec446660 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.532282] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2071.532282] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52206fb0-1f12-53f1-d52d-7ed386265b7c" [ 
2071.532282] env[63024]: _type = "Task" [ 2071.532282] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2071.540508] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52206fb0-1f12-53f1-d52d-7ed386265b7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2071.618556] env[63024]: DEBUG nova.compute.manager [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2071.647779] env[63024]: DEBUG nova.virt.hardware [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2071.648051] env[63024]: DEBUG nova.virt.hardware [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2071.648224] env[63024]: DEBUG nova.virt.hardware [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2071.648482] env[63024]: DEBUG nova.virt.hardware [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2071.648671] env[63024]: DEBUG nova.virt.hardware [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2071.648825] env[63024]: DEBUG nova.virt.hardware [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2071.649044] env[63024]: DEBUG nova.virt.hardware [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2071.649215] env[63024]: DEBUG nova.virt.hardware [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2071.649382] env[63024]: DEBUG nova.virt.hardware [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2071.649544] env[63024]: DEBUG nova.virt.hardware [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2071.649713] env[63024]: DEBUG nova.virt.hardware [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2071.650600] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbacd172-7818-47d3-b8b5-fdd759cdaeae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.659287] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-265309dc-01e4-4953-ab4c-dd66baa48ed8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2071.718456] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "cb038d54-b785-4930-b8a5-b309c5f4b58d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2071.718456] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.772052] env[63024]: DEBUG nova.network.neutron [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 
tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Successfully updated port: b0190de6-3c0b-430e-9952-40bdf36d8b58 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2071.909159] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.325s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2071.909705] env[63024]: DEBUG nova.compute.manager [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2071.912506] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.028s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2071.914011] env[63024]: INFO nova.compute.claims [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2072.043028] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52206fb0-1f12-53f1-d52d-7ed386265b7c, 'name': SearchDatastore_Task, 'duration_secs': 0.01055} completed successfully. 
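
The paired "Acquiring lock ... / acquired ... waited 19.028s / ... 'released' ... held 2.325s" entries above are oslo.concurrency's lock tracing around the resource tracker's instance_claim: every build serializes on the shared "compute_resources" lock, so a claim that is held for a while shows up as a long "waited" time for whoever queues up behind it. A minimal sketch of that pattern, using lockutils directly and a hypothetical claim body (not the actual ResourceTracker code):

    from oslo_concurrency import lockutils

    # All claims funnel through one named lock, mirroring the
    # "compute_resources" acquire/release pairs traced in the log above.
    @lockutils.synchronized("compute_resources")
    def claim_resources(instance_uuid, vcpus, memory_mb):
        # hypothetical body: update the node's local resource accounting
        print("claimed %s vCPU / %s MB for %s" % (vcpus, memory_mb, instance_uuid))
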
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.043340] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2072.043582] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2072.043850] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.044017] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2072.044220] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2072.044491] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d5bd4c1-4b6b-45eb-a17e-918d2da6139f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.057672] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2072.057939] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2072.058693] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b8bdb990-446c-43de-a9f8-8097c13809c7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.063983] env[63024]: DEBUG nova.compute.manager [req-47212c88-595e-4206-820f-be4531bdb67c req-fac13125-f59f-44c7-90d6-0e190fa2610f service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Received event network-vif-plugged-b0190de6-3c0b-430e-9952-40bdf36d8b58 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2072.064110] env[63024]: DEBUG oslo_concurrency.lockutils [req-47212c88-595e-4206-820f-be4531bdb67c req-fac13125-f59f-44c7-90d6-0e190fa2610f service nova] Acquiring lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2072.064334] env[63024]: DEBUG oslo_concurrency.lockutils [req-47212c88-595e-4206-820f-be4531bdb67c req-fac13125-f59f-44c7-90d6-0e190fa2610f service nova] Lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2072.064512] env[63024]: DEBUG oslo_concurrency.lockutils [req-47212c88-595e-4206-820f-be4531bdb67c req-fac13125-f59f-44c7-90d6-0e190fa2610f service nova] Lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2072.064685] env[63024]: DEBUG nova.compute.manager [req-47212c88-595e-4206-820f-be4531bdb67c req-fac13125-f59f-44c7-90d6-0e190fa2610f service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] No waiting events found dispatching network-vif-plugged-b0190de6-3c0b-430e-9952-40bdf36d8b58 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2072.064850] env[63024]: WARNING nova.compute.manager [req-47212c88-595e-4206-820f-be4531bdb67c req-fac13125-f59f-44c7-90d6-0e190fa2610f service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Received unexpected event network-vif-plugged-b0190de6-3c0b-430e-9952-40bdf36d8b58 for instance with vm_state active and task_state None. [ 2072.066636] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2072.066636] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fc1090-4171-35d3-5ebe-286e9feb3d6a" [ 2072.066636] env[63024]: _type = "Task" [ 2072.066636] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.076978] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fc1090-4171-35d3-5ebe-286e9feb3d6a, 'name': SearchDatastore_Task} progress is 0%. 
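
The "Waiting for the task: (returnval){ ... _type = "Task" } to complete" block followed by the "progress is 0%" line above reflects the driver polling a vCenter task object until it reaches a terminal state, at which point the log reports "completed successfully" together with duration_secs. A rough, illustrative version of that poll loop; an assumed fetch_task_info callable stands in for the real oslo.vmware session plumbing:

    import time

    POLL_INTERVAL = 0.5  # seconds between the "progress is N%" style checks

    def wait_for_task(fetch_task_info):
        """Poll a vCenter-style task until it reports success or error."""
        while True:
            info = fetch_task_info()           # e.g. one property-collector round trip
            if info["state"] == "success":
                return info                    # logged as "... completed successfully"
            if info["state"] == "error":
                raise RuntimeError(info.get("error", "task failed"))
            # still queued or running: logged as "progress is N%"
            time.sleep(POLL_INTERVAL)
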
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.219674] env[63024]: DEBUG nova.compute.manager [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2072.274459] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.274639] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2072.274959] env[63024]: DEBUG nova.network.neutron [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2072.369158] env[63024]: DEBUG nova.network.neutron [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Successfully updated port: 5d2fbce5-45d5-4e27-bc41-6f77e528f245 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2072.420949] env[63024]: DEBUG nova.compute.utils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2072.422402] env[63024]: DEBUG nova.compute.manager [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2072.422571] env[63024]: DEBUG nova.network.neutron [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2072.474301] env[63024]: DEBUG nova.policy [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07af525e7d7f4f9783339f4f5aa58f5b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '5dcb1fcc9fd945cb9f4477fe1cce3f5b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2072.579328] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fc1090-4171-35d3-5ebe-286e9feb3d6a, 'name': SearchDatastore_Task, 'duration_secs': 0.03325} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2072.580161] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3243a75-cba6-4bcc-b128-8511c9d5d72a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2072.586824] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2072.586824] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5219b005-87be-c316-1f86-a9feef63be60" [ 2072.586824] env[63024]: _type = "Task" [ 2072.586824] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2072.596275] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5219b005-87be-c316-1f86-a9feef63be60, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2072.752694] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2072.839947] env[63024]: WARNING nova.network.neutron [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] ffb24eaf-c6b6-414f-a69a-0c8806712ddd already exists in list: networks containing: ['ffb24eaf-c6b6-414f-a69a-0c8806712ddd']. ignoring it [ 2072.870832] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "refresh_cache-0d253199-adf8-45c0-a6bf-b11c12b08688" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2072.871091] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "refresh_cache-0d253199-adf8-45c0-a6bf-b11c12b08688" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2072.871255] env[63024]: DEBUG nova.network.neutron [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2072.913950] env[63024]: DEBUG nova.network.neutron [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Successfully created port: 38ea7e67-f8df-4441-93e8-2983babd9f62 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2072.926558] env[63024]: DEBUG nova.compute.manager [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2073.097533] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5219b005-87be-c316-1f86-a9feef63be60, 'name': SearchDatastore_Task, 'duration_secs': 0.012112} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.097800] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2073.098156] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c5541241-84e2-4216-b6f9-4c716f29d759/c5541241-84e2-4216-b6f9-4c716f29d759.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2073.098466] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-34971ce3-c8e0-4649-92f0-07058e25710a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.111974] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2073.111974] env[63024]: value = "task-1951664" [ 2073.111974] env[63024]: _type = "Task" [ 2073.111974] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.127407] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951664, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.226043] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cea589-0d33-4694-98d9-51a9d869d8d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.234447] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c5f553f-f695-42f4-98c8-dd116538a468 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.277999] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fddae35f-287a-46d5-8df7-d47e121d9b04 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.287791] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd34703-bb8f-4f30-9508-865b983e0295 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.307467] env[63024]: DEBUG nova.compute.provider_tree [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2073.460399] env[63024]: DEBUG nova.network.neutron [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2073.557757] env[63024]: DEBUG nova.network.neutron [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updating instance_info_cache with network_info: [{"id": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "address": "fa:16:3e:8b:4e:6d", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041c096f-ef", "ovs_interfaceid": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b0190de6-3c0b-430e-9952-40bdf36d8b58", "address": "fa:16:3e:6e:6f:85", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0190de6-3c", "ovs_interfaceid": "b0190de6-3c0b-430e-9952-40bdf36d8b58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2073.630090] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951664, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.513062} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2073.633423] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] c5541241-84e2-4216-b6f9-4c716f29d759/c5541241-84e2-4216-b6f9-4c716f29d759.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2073.633702] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2073.634027] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0f6f2f15-6834-4d3b-b715-660c9dae62c3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.643719] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2073.643719] env[63024]: value = "task-1951665" [ 2073.643719] env[63024]: _type = "Task" [ 2073.643719] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2073.656330] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951665, 'name': ExtendVirtualDisk_Task} progress is 0%. 
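
The target of "Extending root virtual disk to 1048576" above is simply the m1.nano flavor's root_gb=1 expressed in KiB, which appears to be the unit handed to ExtendVirtualDisk_Task here. A quick check (the helper name is ours, not Nova's):

    def root_disk_size_kb(root_gb):
        # 1 GiB = 1024 MiB = 1024 * 1024 KiB
        return root_gb * 1024 * 1024

    assert root_disk_size_kb(1) == 1048576  # matches the logged extend target
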
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2073.745390] env[63024]: DEBUG nova.network.neutron [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Updating instance_info_cache with network_info: [{"id": "5d2fbce5-45d5-4e27-bc41-6f77e528f245", "address": "fa:16:3e:91:c6:2f", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d2fbce5-45", "ovs_interfaceid": "5d2fbce5-45d5-4e27-bc41-6f77e528f245", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2073.810607] env[63024]: DEBUG nova.scheduler.client.report [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2073.943038] env[63024]: DEBUG nova.compute.manager [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Start spawning the instance on the hypervisor. 
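
The inventory report above bounds what the scheduler can place on provider 89dfa68a-133e-436f-a9f1-86051f9fb96b: under the usual Placement convention, the schedulable capacity of a resource class is (total - reserved) * allocation_ratio, with max_unit capping any single allocation. Checking the logged numbers (the helper name is ours):

    def effective_capacity(total, reserved, allocation_ratio):
        return (total - reserved) * allocation_ratio

    print(effective_capacity(48, 0, 4.0))        # VCPU      -> 192.0 schedulable vCPUs
    print(effective_capacity(196590, 512, 1.0))  # MEMORY_MB -> 196078 MB
    print(effective_capacity(400, 0, 1.0))       # DISK_GB   -> 400 GB, though any single
                                                 # allocation is capped at max_unit=170
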
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2073.968632] env[63024]: DEBUG nova.virt.hardware [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2073.968897] env[63024]: DEBUG nova.virt.hardware [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2073.969092] env[63024]: DEBUG nova.virt.hardware [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2073.969300] env[63024]: DEBUG nova.virt.hardware [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2073.969450] env[63024]: DEBUG nova.virt.hardware [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2073.969602] env[63024]: DEBUG nova.virt.hardware [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2073.969817] env[63024]: DEBUG nova.virt.hardware [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2073.969981] env[63024]: DEBUG nova.virt.hardware [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2073.970176] env[63024]: DEBUG nova.virt.hardware [None 
req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2073.970348] env[63024]: DEBUG nova.virt.hardware [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2073.970527] env[63024]: DEBUG nova.virt.hardware [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2073.971437] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93a7b9e-727e-4cb4-830b-1bc88a5c5cbb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2073.980261] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a373a27-4401-4729-a03d-9c18d4a807b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.063283] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2074.063973] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2074.064192] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2074.065074] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d0516d-6323-4cf7-b7cf-fd0d0ebff86d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.082817] env[63024]: DEBUG nova.virt.hardware [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2074.083094] env[63024]: DEBUG nova.virt.hardware [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2074.083261] env[63024]: DEBUG nova.virt.hardware [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2074.083447] env[63024]: DEBUG nova.virt.hardware [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2074.083595] env[63024]: DEBUG nova.virt.hardware [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2074.083743] env[63024]: DEBUG nova.virt.hardware [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2074.083951] env[63024]: DEBUG nova.virt.hardware [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2074.084170] env[63024]: DEBUG nova.virt.hardware [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2074.084355] env[63024]: DEBUG nova.virt.hardware [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2074.084522] env[63024]: DEBUG nova.virt.hardware [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2074.084695] env[63024]: DEBUG nova.virt.hardware [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 
tempest-AttachInterfacesTestJSON-1162117638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2074.090908] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Reconfiguring VM to attach interface {{(pid=63024) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2074.091534] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ed9bdd9-4e69-4dbd-a71e-b58c5c016c6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.110144] env[63024]: DEBUG oslo_vmware.api [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2074.110144] env[63024]: value = "task-1951666" [ 2074.110144] env[63024]: _type = "Task" [ 2074.110144] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2074.119965] env[63024]: DEBUG oslo_vmware.api [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951666, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.154041] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951665, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070037} completed successfully. 
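
The topology traces above ("Flavor limits 0:0:0", limits defaulting to sockets=65536, cores=65536, threads=65536, then "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]") amount to enumerating the sockets*cores*threads factorizations of the vCPU count that fit under the limits; for the 1-vCPU m1.nano flavor only 1:1:1 qualifies. A simplified re-derivation of that step, not Nova's actual implementation:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
        topos = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        topos.append((s, c, t))
        return topos

    print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology the log reports
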
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2074.154305] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2074.155278] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0dec66-7872-4689-99b1-38f28a049d5c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.178137] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] c5541241-84e2-4216-b6f9-4c716f29d759/c5541241-84e2-4216-b6f9-4c716f29d759.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2074.178446] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ab5b96c-d2b8-4e19-88a0-a9ec248ad2ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.201093] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2074.201093] env[63024]: value = "task-1951667" [ 2074.201093] env[63024]: _type = "Task" [ 2074.201093] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2074.208669] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951667, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.243436] env[63024]: DEBUG nova.compute.manager [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Received event network-changed-b0190de6-3c0b-430e-9952-40bdf36d8b58 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2074.243436] env[63024]: DEBUG nova.compute.manager [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Refreshing instance network info cache due to event network-changed-b0190de6-3c0b-430e-9952-40bdf36d8b58. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2074.243921] env[63024]: DEBUG oslo_concurrency.lockutils [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] Acquiring lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2074.243921] env[63024]: DEBUG oslo_concurrency.lockutils [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] Acquired lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2074.243921] env[63024]: DEBUG nova.network.neutron [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Refreshing network info cache for port b0190de6-3c0b-430e-9952-40bdf36d8b58 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2074.247899] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "refresh_cache-0d253199-adf8-45c0-a6bf-b11c12b08688" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2074.248199] env[63024]: DEBUG nova.compute.manager [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Instance network_info: |[{"id": "5d2fbce5-45d5-4e27-bc41-6f77e528f245", "address": "fa:16:3e:91:c6:2f", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d2fbce5-45", "ovs_interfaceid": "5d2fbce5-45d5-4e27-bc41-6f77e528f245", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2074.248581] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:c6:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'5d2fbce5-45d5-4e27-bc41-6f77e528f245', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2074.255955] env[63024]: DEBUG oslo.service.loopingcall [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2074.256821] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2074.257064] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-455d394b-93b5-43d4-ae75-389bbe6d76a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2074.278210] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2074.278210] env[63024]: value = "task-1951668" [ 2074.278210] env[63024]: _type = "Task" [ 2074.278210] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2074.287250] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951668, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.318104] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2074.318695] env[63024]: DEBUG nova.compute.manager [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2074.321604] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.859s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2074.323573] env[63024]: INFO nova.compute.claims [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2074.624105] env[63024]: DEBUG oslo_vmware.api [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951666, 'name': ReconfigVM_Task} progress is 14%. 
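
The "Instance VIF info" entry above shows how a Neutron port from the instance's network_info is flattened into the structure build_virtual_machine consumes: the NSX logical-switch id from the port's binding details becomes an OpaqueNetwork reference and the port UUID becomes iface_id. A rough illustration of that mapping, with field names copied from the log; the helper itself is an assumption, not the nova.virt.vmwareapi code:

    def vif_info_from_port(vif):
        """Map one network_info entry (as logged above) to a VIF info dict."""
        return {
            "network_name": vif["network"]["bridge"],        # 'br-int'
            "mac_address": vif["address"],                   # e.g. 'fa:16:3e:91:c6:2f'
            "network_ref": {
                "type": "OpaqueNetwork",
                "network-id": vif["details"]["nsx-logical-switch-id"],
                "network-type": "nsx.LogicalSwitch",
                "use-external-id": True,
            },
            "iface_id": vif["id"],                           # the Neutron port UUID
            "vif_model": "vmxnet3",                          # per the image's hw properties
        }
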
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.712571] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951667, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.789523] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951668, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2074.827986] env[63024]: DEBUG nova.compute.utils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2074.832716] env[63024]: DEBUG nova.compute.manager [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2074.833011] env[63024]: DEBUG nova.network.neutron [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2074.963807] env[63024]: DEBUG nova.policy [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7c32db2d81e40c492c1362d8356a03c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93098ad83ae144bf90a12c97ec863c06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2075.122674] env[63024]: DEBUG oslo_vmware.api [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951666, 'name': ReconfigVM_Task, 'duration_secs': 0.87019} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.123454] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2075.123630] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Reconfigured VM to attach interface {{(pid=63024) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2075.212573] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951667, 'name': ReconfigVM_Task, 'duration_secs': 0.568793} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.213026] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Reconfigured VM instance instance-00000066 to attach disk [datastore1] c5541241-84e2-4216-b6f9-4c716f29d759/c5541241-84e2-4216-b6f9-4c716f29d759.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2075.213683] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c15a2d3e-0366-4c67-8305-124ca57da077 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.221119] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2075.221119] env[63024]: value = "task-1951669" [ 2075.221119] env[63024]: _type = "Task" [ 2075.221119] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.232043] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951669, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.289148] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951668, 'name': CreateVM_Task, 'duration_secs': 0.769436} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.289861] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2075.290396] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2075.290559] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2075.294021] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2075.294021] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7fc2d55d-234e-4b2f-b9a1-8b1b39bf0218 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.296330] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2075.296330] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e4f8aa-4d6d-069f-a2db-d1ff0e7f5fbf" [ 2075.296330] env[63024]: _type = "Task" [ 2075.296330] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.305008] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e4f8aa-4d6d-069f-a2db-d1ff0e7f5fbf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.336654] env[63024]: DEBUG nova.compute.manager [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2075.459070] env[63024]: DEBUG nova.network.neutron [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Successfully updated port: 38ea7e67-f8df-4441-93e8-2983babd9f62 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2075.514450] env[63024]: DEBUG nova.network.neutron [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updated VIF entry in instance network info cache for port b0190de6-3c0b-430e-9952-40bdf36d8b58. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2075.514878] env[63024]: DEBUG nova.network.neutron [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updating instance_info_cache with network_info: [{"id": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "address": "fa:16:3e:8b:4e:6d", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041c096f-ef", "ovs_interfaceid": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b0190de6-3c0b-430e-9952-40bdf36d8b58", "address": "fa:16:3e:6e:6f:85", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0190de6-3c", "ovs_interfaceid": "b0190de6-3c0b-430e-9952-40bdf36d8b58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63024) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2075.579573] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d05d56-e05a-459d-80f7-e81176378164 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.588471] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93a82a7-b66f-44c8-aae3-33173115c4fe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.620205] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7095f800-10ce-4c0a-ba2d-c3060ec424cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.630864] env[63024]: DEBUG oslo_concurrency.lockutils [None req-25d9905c-bdd0-41b5-8e63-37abb5b82df9 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-34e4db8e-e0d9-4a27-9368-c5e711b51a29-b0190de6-3c0b-430e-9952-40bdf36d8b58" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.362s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2075.634060] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-931fd1d3-8145-4a1c-9b9b-400a609377e5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.654017] env[63024]: DEBUG nova.compute.provider_tree [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2075.683331] env[63024]: DEBUG nova.network.neutron [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Successfully created port: a75a35bb-1971-4617-9a1a-5750c7485384 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2075.731012] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951669, 'name': Rename_Task, 'duration_secs': 0.150202} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.731316] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2075.731608] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1c77ea52-2c1a-43b6-ad88-0857611980a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.738871] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2075.738871] env[63024]: value = "task-1951670" [ 2075.738871] env[63024]: _type = "Task" [ 2075.738871] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.747305] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951670, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.811250] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e4f8aa-4d6d-069f-a2db-d1ff0e7f5fbf, 'name': SearchDatastore_Task, 'duration_secs': 0.011081} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2075.811250] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2075.811250] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2075.811250] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2075.811250] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2075.811250] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2075.811250] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bde53703-1217-447b-95ff-45cf02df17f8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.827023] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2075.827023] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2075.827506] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-362738a5-421e-4936-a458-44ac1f306ee6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2075.834944] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2075.834944] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5275b186-cb41-2105-4186-cf63892289e3" [ 2075.834944] env[63024]: _type = "Task" [ 2075.834944] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2075.846644] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5275b186-cb41-2105-4186-cf63892289e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2075.960975] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "refresh_cache-5192ad93-a4e9-4aa0-983d-186ab17360f0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2075.961178] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquired lock "refresh_cache-5192ad93-a4e9-4aa0-983d-186ab17360f0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2075.961397] env[63024]: DEBUG nova.network.neutron [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2076.019283] env[63024]: DEBUG oslo_concurrency.lockutils [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] Releasing lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2076.019649] env[63024]: DEBUG nova.compute.manager [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Received event network-vif-plugged-5d2fbce5-45d5-4e27-bc41-6f77e528f245 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2076.020014] env[63024]: DEBUG oslo_concurrency.lockutils [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] Acquiring lock "0d253199-adf8-45c0-a6bf-b11c12b08688-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
2076.020305] env[63024]: DEBUG oslo_concurrency.lockutils [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] Lock "0d253199-adf8-45c0-a6bf-b11c12b08688-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.020485] env[63024]: DEBUG oslo_concurrency.lockutils [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] Lock "0d253199-adf8-45c0-a6bf-b11c12b08688-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.020656] env[63024]: DEBUG nova.compute.manager [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] No waiting events found dispatching network-vif-plugged-5d2fbce5-45d5-4e27-bc41-6f77e528f245 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2076.020828] env[63024]: WARNING nova.compute.manager [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Received unexpected event network-vif-plugged-5d2fbce5-45d5-4e27-bc41-6f77e528f245 for instance with vm_state building and task_state spawning. [ 2076.020996] env[63024]: DEBUG nova.compute.manager [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Received event network-changed-5d2fbce5-45d5-4e27-bc41-6f77e528f245 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2076.021218] env[63024]: DEBUG nova.compute.manager [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Refreshing instance network info cache due to event network-changed-5d2fbce5-45d5-4e27-bc41-6f77e528f245. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2076.021467] env[63024]: DEBUG oslo_concurrency.lockutils [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] Acquiring lock "refresh_cache-0d253199-adf8-45c0-a6bf-b11c12b08688" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.021648] env[63024]: DEBUG oslo_concurrency.lockutils [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] Acquired lock "refresh_cache-0d253199-adf8-45c0-a6bf-b11c12b08688" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2076.021818] env[63024]: DEBUG nova.network.neutron [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Refreshing network info cache for port 5d2fbce5-45d5-4e27-bc41-6f77e528f245 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2076.134166] env[63024]: DEBUG oslo_concurrency.lockutils [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "3cf2726c-2551-4bbd-8032-006062cdcc39" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.134465] env[63024]: DEBUG oslo_concurrency.lockutils [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "3cf2726c-2551-4bbd-8032-006062cdcc39" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.134703] env[63024]: DEBUG oslo_concurrency.lockutils [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "3cf2726c-2551-4bbd-8032-006062cdcc39-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.134927] env[63024]: DEBUG oslo_concurrency.lockutils [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "3cf2726c-2551-4bbd-8032-006062cdcc39-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.135145] env[63024]: DEBUG oslo_concurrency.lockutils [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "3cf2726c-2551-4bbd-8032-006062cdcc39-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.137805] env[63024]: INFO nova.compute.manager [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 
tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Terminating instance [ 2076.157621] env[63024]: DEBUG nova.scheduler.client.report [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2076.250673] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951670, 'name': PowerOnVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.345245] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5275b186-cb41-2105-4186-cf63892289e3, 'name': SearchDatastore_Task, 'duration_secs': 0.012559} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.346067] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57ec76ea-2095-441e-b5e6-a0e86ccb1c46 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.349031] env[63024]: DEBUG nova.compute.manager [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2076.354962] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2076.354962] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520539eb-1aab-c07e-0c20-7e17484b8a05" [ 2076.354962] env[63024]: _type = "Task" [ 2076.354962] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.363449] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520539eb-1aab-c07e-0c20-7e17484b8a05, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.377341] env[63024]: DEBUG nova.virt.hardware [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2076.377586] env[63024]: DEBUG nova.virt.hardware [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2076.377742] env[63024]: DEBUG nova.virt.hardware [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2076.377921] env[63024]: DEBUG nova.virt.hardware [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2076.378082] env[63024]: DEBUG nova.virt.hardware [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2076.378237] env[63024]: DEBUG nova.virt.hardware [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2076.378444] env[63024]: DEBUG nova.virt.hardware [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2076.378603] env[63024]: DEBUG nova.virt.hardware [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2076.378770] env[63024]: DEBUG nova.virt.hardware [None 
req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2076.378935] env[63024]: DEBUG nova.virt.hardware [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2076.379182] env[63024]: DEBUG nova.virt.hardware [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2076.379981] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b04bcae-9732-4c95-a05d-afdbaf453dfa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.391019] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53670236-cc9c-451d-99c2-d6eea6eeb892 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.417707] env[63024]: DEBUG nova.compute.manager [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Received event network-vif-plugged-38ea7e67-f8df-4441-93e8-2983babd9f62 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2076.417957] env[63024]: DEBUG oslo_concurrency.lockutils [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] Acquiring lock "5192ad93-a4e9-4aa0-983d-186ab17360f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2076.418165] env[63024]: DEBUG oslo_concurrency.lockutils [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.418344] env[63024]: DEBUG oslo_concurrency.lockutils [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.418527] env[63024]: DEBUG nova.compute.manager [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] No waiting events found dispatching network-vif-plugged-38ea7e67-f8df-4441-93e8-2983babd9f62 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2076.418694] env[63024]: WARNING nova.compute.manager [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 
req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Received unexpected event network-vif-plugged-38ea7e67-f8df-4441-93e8-2983babd9f62 for instance with vm_state building and task_state spawning. [ 2076.418849] env[63024]: DEBUG nova.compute.manager [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Received event network-changed-38ea7e67-f8df-4441-93e8-2983babd9f62 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2076.419007] env[63024]: DEBUG nova.compute.manager [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Refreshing instance network info cache due to event network-changed-38ea7e67-f8df-4441-93e8-2983babd9f62. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2076.419181] env[63024]: DEBUG oslo_concurrency.lockutils [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] Acquiring lock "refresh_cache-5192ad93-a4e9-4aa0-983d-186ab17360f0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2076.490265] env[63024]: DEBUG nova.network.neutron [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2076.641918] env[63024]: DEBUG nova.compute.manager [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2076.642187] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2076.643138] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ae49755-373e-4654-91f0-03856feefc16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.653101] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2076.653364] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6b0a092b-53c0-47bd-9a58-e363acb68a8c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.662056] env[63024]: DEBUG oslo_vmware.api [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2076.662056] env[63024]: value = "task-1951671" [ 2076.662056] env[63024]: _type = "Task" [ 2076.662056] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.665650] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2076.666150] env[63024]: DEBUG nova.compute.manager [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2076.668942] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 16.257s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2076.677603] env[63024]: DEBUG oslo_vmware.api [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951671, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.751239] env[63024]: DEBUG oslo_vmware.api [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951670, 'name': PowerOnVM_Task, 'duration_secs': 0.584558} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.751633] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2076.751909] env[63024]: DEBUG nova.compute.manager [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2076.755782] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4494ba58-ff57-4607-8497-e90bb14346d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.807545] env[63024]: DEBUG nova.network.neutron [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Updating instance_info_cache with network_info: [{"id": "38ea7e67-f8df-4441-93e8-2983babd9f62", "address": "fa:16:3e:2f:ad:7e", "network": {"id": "83ed1c04-a2e0-4c15-ae35-68e988607ce4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-470202335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb1fcc9fd945cb9f4477fe1cce3f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ea7e67-f8", "ovs_interfaceid": "38ea7e67-f8df-4441-93e8-2983babd9f62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2076.866651] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]520539eb-1aab-c07e-0c20-7e17484b8a05, 'name': SearchDatastore_Task, 'duration_secs': 0.012641} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2076.867046] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2076.867304] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 0d253199-adf8-45c0-a6bf-b11c12b08688/0d253199-adf8-45c0-a6bf-b11c12b08688.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2076.867625] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-429eaa49-0af5-4700-8b1b-3d0b4ba54269 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2076.876608] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2076.876608] env[63024]: value = "task-1951672" [ 2076.876608] env[63024]: _type = "Task" [ 2076.876608] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2076.886881] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951672, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2076.959238] env[63024]: DEBUG nova.network.neutron [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Updated VIF entry in instance network info cache for port 5d2fbce5-45d5-4e27-bc41-6f77e528f245. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2076.959238] env[63024]: DEBUG nova.network.neutron [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Updating instance_info_cache with network_info: [{"id": "5d2fbce5-45d5-4e27-bc41-6f77e528f245", "address": "fa:16:3e:91:c6:2f", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d2fbce5-45", "ovs_interfaceid": "5d2fbce5-45d5-4e27-bc41-6f77e528f245", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2077.001679] env[63024]: DEBUG oslo_concurrency.lockutils [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "interface-34e4db8e-e0d9-4a27-9368-c5e711b51a29-b0190de6-3c0b-430e-9952-40bdf36d8b58" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.002464] env[63024]: DEBUG oslo_concurrency.lockutils [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-34e4db8e-e0d9-4a27-9368-c5e711b51a29-b0190de6-3c0b-430e-9952-40bdf36d8b58" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.175120] env[63024]: DEBUG nova.compute.utils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2077.187108] env[63024]: DEBUG nova.compute.manager [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2077.187338] env[63024]: DEBUG nova.network.neutron [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2077.189412] env[63024]: DEBUG oslo_vmware.api [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951671, 'name': PowerOffVM_Task, 'duration_secs': 0.289804} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.190529] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2077.190862] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2077.191726] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-419669d1-634a-48e1-8e2b-6e586bf6a8db {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.283420] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.305292] env[63024]: DEBUG nova.policy [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1efa94ebfd9143d7bb129313b3e3d5d0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7fc70d467714e59b3c171a308feafdf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2077.309994] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Releasing lock "refresh_cache-5192ad93-a4e9-4aa0-983d-186ab17360f0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.310560] env[63024]: DEBUG nova.compute.manager [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 
tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Instance network_info: |[{"id": "38ea7e67-f8df-4441-93e8-2983babd9f62", "address": "fa:16:3e:2f:ad:7e", "network": {"id": "83ed1c04-a2e0-4c15-ae35-68e988607ce4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-470202335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb1fcc9fd945cb9f4477fe1cce3f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ea7e67-f8", "ovs_interfaceid": "38ea7e67-f8df-4441-93e8-2983babd9f62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2077.311342] env[63024]: DEBUG oslo_concurrency.lockutils [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] Acquired lock "refresh_cache-5192ad93-a4e9-4aa0-983d-186ab17360f0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.312109] env[63024]: DEBUG nova.network.neutron [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Refreshing network info cache for port 38ea7e67-f8df-4441-93e8-2983babd9f62 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2077.313658] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2f:ad:7e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '38ea7e67-f8df-4441-93e8-2983babd9f62', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2077.324246] env[63024]: DEBUG oslo.service.loopingcall [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2077.329799] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2077.330563] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f9518c17-2e75-423b-bca7-4adefe55c2ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.352792] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2077.353638] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2077.354547] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Deleting the datastore file [datastore1] 3cf2726c-2551-4bbd-8032-006062cdcc39 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2077.358197] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-784346fa-c4ef-444c-88b5-912687f3da57 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.366355] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2077.366355] env[63024]: value = "task-1951674" [ 2077.366355] env[63024]: _type = "Task" [ 2077.366355] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.372176] env[63024]: DEBUG oslo_vmware.api [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2077.372176] env[63024]: value = "task-1951675" [ 2077.372176] env[63024]: _type = "Task" [ 2077.372176] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.383289] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951674, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.391890] env[63024]: DEBUG oslo_vmware.api [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951675, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.398364] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951672, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490906} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.398680] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 0d253199-adf8-45c0-a6bf-b11c12b08688/0d253199-adf8-45c0-a6bf-b11c12b08688.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2077.398901] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2077.399182] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bfcc0136-7aa9-4c7f-b6dc-19fef3c14d6e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.416892] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2077.416892] env[63024]: value = "task-1951676" [ 2077.416892] env[63024]: _type = "Task" [ 2077.416892] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.434847] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951676, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.462700] env[63024]: DEBUG oslo_concurrency.lockutils [req-a40cb6a2-34e6-4cd6-8c4c-ba71856f3b68 req-daa17e57-0209-407e-8c1b-a1741a5c00cd service nova] Releasing lock "refresh_cache-0d253199-adf8-45c0-a6bf-b11c12b08688" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2077.508344] env[63024]: DEBUG oslo_concurrency.lockutils [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.508527] env[63024]: DEBUG oslo_concurrency.lockutils [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.513230] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e52dbb-6938-4bad-9e15-d33debd5cdde {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.536080] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73ab214a-a3d6-4459-83e5-2498f40e9baa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.539775] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a1c096-4882-4296-9221-7e4d91fb7a5a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.567559] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f3e5c8-d52b-4176-b57f-9b2032d627d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.577419] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Reconfiguring VM to detach interface {{(pid=63024) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 2077.577763] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d7dfd8cf-2c5c-4ad1-a122-da60f00a6b2c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.632791] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7c532dc-4a01-463a-a1a3-8e6d2fb42962 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.636395] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2077.636395] env[63024]: value = "task-1951677" [ 2077.636395] env[63024]: _type = "Task" [ 2077.636395] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.645237] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60988b80-a1ab-43b1-9bff-1a23f3a29c12 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.652534] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.654205] env[63024]: DEBUG nova.network.neutron [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Successfully updated port: a75a35bb-1971-4617-9a1a-5750c7485384 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2077.664861] env[63024]: DEBUG nova.compute.provider_tree [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2077.690732] env[63024]: DEBUG nova.compute.manager [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2077.708713] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquiring lock "c5541241-84e2-4216-b6f9-4c716f29d759" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.709248] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lock "c5541241-84e2-4216-b6f9-4c716f29d759" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.709248] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquiring lock "c5541241-84e2-4216-b6f9-4c716f29d759-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2077.709675] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lock "c5541241-84e2-4216-b6f9-4c716f29d759-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2077.709724] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lock "c5541241-84e2-4216-b6f9-4c716f29d759-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2077.711921] env[63024]: INFO nova.compute.manager [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Terminating instance [ 2077.879909] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951674, 'name': CreateVM_Task, 'duration_secs': 0.489064} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.880374] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2077.881106] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2077.881280] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2077.881612] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2077.881867] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8619bdf5-b4f9-4ccc-892d-337d3290dd73 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.886322] env[63024]: DEBUG oslo_vmware.api [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951675, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.2766} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.887114] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2077.887114] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2077.887281] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2077.887390] env[63024]: INFO nova.compute.manager [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Took 1.25 seconds to destroy the instance on the hypervisor. [ 2077.887621] env[63024]: DEBUG oslo.service.loopingcall [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2077.887800] env[63024]: DEBUG nova.compute.manager [-] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2077.887893] env[63024]: DEBUG nova.network.neutron [-] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2077.890521] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2077.890521] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5292ec25-b397-d0f5-3b39-151d904190d0" [ 2077.890521] env[63024]: _type = "Task" [ 2077.890521] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.899187] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5292ec25-b397-d0f5-3b39-151d904190d0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2077.926780] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951676, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081511} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2077.927084] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2077.927905] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c38244d-69e1-4559-b153-2e47e48172ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.950373] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 0d253199-adf8-45c0-a6bf-b11c12b08688/0d253199-adf8-45c0-a6bf-b11c12b08688.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2077.950647] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-470125a9-ad26-4a1b-b4a2-f60423e0f371 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2077.965082] env[63024]: DEBUG nova.network.neutron [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Successfully created port: 60d617ea-e957-4bd3-839b-2036b3433064 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2077.973271] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2077.973271] env[63024]: value = "task-1951678" [ 2077.973271] env[63024]: _type = "Task" [ 2077.973271] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2077.982254] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951678, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.009459] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.009730] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.009942] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "92d1f96e-bbe7-4654-9d3a-47ba40057157-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.010145] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.010322] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.012679] env[63024]: INFO nova.compute.manager [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Terminating instance [ 2078.147014] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.169596] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "refresh_cache-881b1f35-206e-4c3f-bf7a-d1774a9343c2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.169596] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "refresh_cache-881b1f35-206e-4c3f-bf7a-d1774a9343c2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.169596] env[63024]: DEBUG nova.network.neutron [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2078.169596] env[63024]: DEBUG nova.scheduler.client.report [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2078.218118] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquiring lock "refresh_cache-c5541241-84e2-4216-b6f9-4c716f29d759" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.218118] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquired lock "refresh_cache-c5541241-84e2-4216-b6f9-4c716f29d759" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.218118] env[63024]: DEBUG nova.network.neutron [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2078.402414] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5292ec25-b397-d0f5-3b39-151d904190d0, 'name': SearchDatastore_Task, 'duration_secs': 0.053392} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.402738] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2078.403039] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2078.403306] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.403462] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2078.403644] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2078.403912] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f5bafbe5-6781-4213-998e-bf718a67b439 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.419899] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2078.419998] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2078.420727] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-24dd47bc-f6ec-444c-af2c-81f05c0095f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.426256] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2078.426256] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5249a918-62c7-98c0-da11-2242f25aab35" [ 2078.426256] env[63024]: _type = "Task" [ 2078.426256] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.434496] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5249a918-62c7-98c0-da11-2242f25aab35, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.485657] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951678, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.516139] env[63024]: DEBUG nova.compute.manager [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2078.516389] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2078.517380] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b8186d-d23a-4464-ad42-8075d07e1372 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.526510] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2078.526859] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cada58a8-4cb1-457b-b183-e2c47b19a433 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.534262] env[63024]: DEBUG nova.compute.manager [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Received event network-vif-plugged-a75a35bb-1971-4617-9a1a-5750c7485384 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2078.534498] env[63024]: DEBUG oslo_concurrency.lockutils [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] Acquiring lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2078.534728] env[63024]: DEBUG oslo_concurrency.lockutils [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] Lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2078.534927] env[63024]: DEBUG oslo_concurrency.lockutils [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] Lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2078.535188] env[63024]: DEBUG nova.compute.manager [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] No waiting events found dispatching network-vif-plugged-a75a35bb-1971-4617-9a1a-5750c7485384 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2078.535421] env[63024]: WARNING nova.compute.manager [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Received unexpected event 
network-vif-plugged-a75a35bb-1971-4617-9a1a-5750c7485384 for instance with vm_state building and task_state spawning. [ 2078.535617] env[63024]: DEBUG nova.compute.manager [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Received event network-changed-a75a35bb-1971-4617-9a1a-5750c7485384 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2078.536194] env[63024]: DEBUG nova.compute.manager [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Refreshing instance network info cache due to event network-changed-a75a35bb-1971-4617-9a1a-5750c7485384. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2078.536194] env[63024]: DEBUG oslo_concurrency.lockutils [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] Acquiring lock "refresh_cache-881b1f35-206e-4c3f-bf7a-d1774a9343c2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2078.538453] env[63024]: DEBUG oslo_vmware.api [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 2078.538453] env[63024]: value = "task-1951679" [ 2078.538453] env[63024]: _type = "Task" [ 2078.538453] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.551468] env[63024]: DEBUG oslo_vmware.api [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951679, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.563691] env[63024]: DEBUG nova.network.neutron [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Updated VIF entry in instance network info cache for port 38ea7e67-f8df-4441-93e8-2983babd9f62. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2078.564207] env[63024]: DEBUG nova.network.neutron [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Updating instance_info_cache with network_info: [{"id": "38ea7e67-f8df-4441-93e8-2983babd9f62", "address": "fa:16:3e:2f:ad:7e", "network": {"id": "83ed1c04-a2e0-4c15-ae35-68e988607ce4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-470202335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb1fcc9fd945cb9f4477fe1cce3f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ea7e67-f8", "ovs_interfaceid": "38ea7e67-f8df-4441-93e8-2983babd9f62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2078.649543] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.702139] env[63024]: DEBUG nova.compute.manager [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2078.733372] env[63024]: DEBUG nova.virt.hardware [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2078.733665] env[63024]: DEBUG nova.virt.hardware [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2078.733847] env[63024]: DEBUG nova.virt.hardware [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2078.734095] env[63024]: DEBUG nova.virt.hardware [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2078.734274] env[63024]: DEBUG nova.virt.hardware [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2078.734447] env[63024]: DEBUG nova.virt.hardware [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2078.734675] env[63024]: DEBUG nova.virt.hardware [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2078.734851] env[63024]: DEBUG nova.virt.hardware [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2078.735029] env[63024]: DEBUG nova.virt.hardware [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2078.735247] env[63024]: DEBUG nova.virt.hardware [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2078.735392] env[63024]: DEBUG nova.virt.hardware [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2078.736297] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd232136-f160-4d54-926c-3e9a1a5d0f6d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.745738] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8e6ad7-9129-4788-acdf-6afd6baf9635 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.762077] env[63024]: DEBUG nova.network.neutron [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2078.765234] env[63024]: DEBUG nova.network.neutron [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2078.937777] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5249a918-62c7-98c0-da11-2242f25aab35, 'name': SearchDatastore_Task, 'duration_secs': 0.026041} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.938672] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1fea6819-fbb5-4680-98ca-51d4a59a19fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.945928] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2078.945928] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523d4dde-b3c1-e5ee-ca2c-50da1b4e5b77" [ 2078.945928] env[63024]: _type = "Task" [ 2078.945928] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2078.954539] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523d4dde-b3c1-e5ee-ca2c-50da1b4e5b77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2078.983072] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951678, 'name': ReconfigVM_Task, 'duration_secs': 0.716244} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2078.983391] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 0d253199-adf8-45c0-a6bf-b11c12b08688/0d253199-adf8-45c0-a6bf-b11c12b08688.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2078.984083] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7ea49b9-e389-448f-a387-fd16dfe107c8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2078.993274] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2078.993274] env[63024]: value = "task-1951680" [ 2078.993274] env[63024]: _type = "Task" [ 2078.993274] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.000415] env[63024]: DEBUG nova.network.neutron [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2079.004982] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951680, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.049468] env[63024]: DEBUG oslo_vmware.api [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951679, 'name': PowerOffVM_Task, 'duration_secs': 0.180456} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.049764] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2079.049928] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2079.050296] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6ae82ec-6259-46bd-9edd-07bf69406e28 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.067213] env[63024]: DEBUG oslo_concurrency.lockutils [req-cad37649-e94c-420e-9ba8-0f1c3afa47b0 req-1f70fc53-b1bc-4fa8-8476-2267a81d9f6e service nova] Releasing lock "refresh_cache-5192ad93-a4e9-4aa0-983d-186ab17360f0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.149805] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.181799] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.513s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2079.186627] env[63024]: DEBUG oslo_concurrency.lockutils [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.770s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2079.186854] env[63024]: DEBUG oslo_concurrency.lockutils [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2079.189206] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.012s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2079.190721] env[63024]: INFO nova.compute.claims [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2079.193319] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2079.193611] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2079.193734] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Deleting the datastore file [datastore1] 92d1f96e-bbe7-4654-9d3a-47ba40057157 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2079.194854] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2152a23a-6edf-4bca-a1da-e2826a6245ff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.204214] 
env[63024]: DEBUG oslo_vmware.api [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for the task: (returnval){ [ 2079.204214] env[63024]: value = "task-1951682" [ 2079.204214] env[63024]: _type = "Task" [ 2079.204214] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.208723] env[63024]: DEBUG nova.network.neutron [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Updating instance_info_cache with network_info: [{"id": "a75a35bb-1971-4617-9a1a-5750c7485384", "address": "fa:16:3e:41:bd:60", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa75a35bb-19", "ovs_interfaceid": "a75a35bb-1971-4617-9a1a-5750c7485384", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2079.216550] env[63024]: DEBUG oslo_vmware.api [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951682, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.229629] env[63024]: INFO nova.scheduler.client.report [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Deleted allocations for instance b588ea21-dea0-4ee6-8f9e-12007d0a1ce1 [ 2079.459850] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523d4dde-b3c1-e5ee-ca2c-50da1b4e5b77, 'name': SearchDatastore_Task, 'duration_secs': 0.014129} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.459850] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.459850] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 5192ad93-a4e9-4aa0-983d-186ab17360f0/5192ad93-a4e9-4aa0-983d-186ab17360f0.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2079.459850] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3e1355f9-97ba-47bc-8c7f-d9d02287ce4f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.468025] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2079.468025] env[63024]: value = "task-1951683" [ 2079.468025] env[63024]: _type = "Task" [ 2079.468025] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.474167] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951683, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.510948] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Releasing lock "refresh_cache-c5541241-84e2-4216-b6f9-4c716f29d759" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.510948] env[63024]: DEBUG nova.compute.manager [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2079.510948] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2079.510948] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951680, 'name': Rename_Task, 'duration_secs': 0.19623} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.510948] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba2a22f-f6a5-44ea-9f67-a009ccf158f4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.514807] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2079.516612] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8237bab8-caac-485f-81e8-1623b0d05820 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.523649] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2079.525074] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8feef799-00be-4759-bc10-ea16f1119594 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.527045] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2079.527045] env[63024]: value = "task-1951684" [ 2079.527045] env[63024]: _type = "Task" [ 2079.527045] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.532807] env[63024]: DEBUG oslo_vmware.api [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2079.532807] env[63024]: value = "task-1951685" [ 2079.532807] env[63024]: _type = "Task" [ 2079.532807] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.540485] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951684, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.547589] env[63024]: DEBUG oslo_vmware.api [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951685, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.651027] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.713051] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "refresh_cache-881b1f35-206e-4c3f-bf7a-d1774a9343c2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2079.713509] env[63024]: DEBUG nova.compute.manager [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Instance network_info: |[{"id": "a75a35bb-1971-4617-9a1a-5750c7485384", "address": "fa:16:3e:41:bd:60", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa75a35bb-19", "ovs_interfaceid": "a75a35bb-1971-4617-9a1a-5750c7485384", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2079.713730] env[63024]: DEBUG oslo_concurrency.lockutils [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] Acquired lock "refresh_cache-881b1f35-206e-4c3f-bf7a-d1774a9343c2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2079.716046] env[63024]: DEBUG nova.network.neutron [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Refreshing network info cache for port a75a35bb-1971-4617-9a1a-5750c7485384 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2079.716046] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:bd:60', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e66c4ebe-f808-4b34-bdb5-6c45edb1736f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a75a35bb-1971-4617-9a1a-5750c7485384', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2079.734752] env[63024]: DEBUG oslo.service.loopingcall [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2079.743710] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2079.744421] env[63024]: DEBUG oslo_concurrency.lockutils [None req-de55f60e-a5ba-41ee-9e4b-1715b36a73fd tempest-ServerRescueNegativeTestJSON-375908407 tempest-ServerRescueNegativeTestJSON-375908407-project-member] Lock "b588ea21-dea0-4ee6-8f9e-12007d0a1ce1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.895s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2079.753741] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-adcb5339-c39a-42f0-a240-2425b557d9d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.779479] env[63024]: DEBUG oslo_vmware.api [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Task: {'id': task-1951682, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.203662} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.781282] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2079.781669] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2079.781869] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2079.782083] env[63024]: INFO nova.compute.manager [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Took 1.27 seconds to destroy the instance on the hypervisor. [ 2079.782306] env[63024]: DEBUG oslo.service.loopingcall [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2079.783568] env[63024]: DEBUG nova.compute.manager [-] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2079.783568] env[63024]: DEBUG nova.network.neutron [-] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2079.803660] env[63024]: INFO nova.scheduler.client.report [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleted allocation for migration b427e72e-174f-4ebf-b6c0-a9329bf591f0 [ 2079.811789] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2079.811789] env[63024]: value = "task-1951686" [ 2079.811789] env[63024]: _type = "Task" [ 2079.811789] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2079.824051] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951686, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2079.979667] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951683, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.507496} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2079.979959] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 5192ad93-a4e9-4aa0-983d-186ab17360f0/5192ad93-a4e9-4aa0-983d-186ab17360f0.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2079.980218] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2079.981135] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90ebf7b6-4762-4ea4-93da-5e06261c1418 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2079.988920] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2079.988920] env[63024]: value = "task-1951687" [ 2079.988920] env[63024]: _type = "Task" [ 2079.988920] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.001918] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951687, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.018589] env[63024]: DEBUG nova.network.neutron [-] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.041600] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951684, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.047866] env[63024]: DEBUG oslo_vmware.api [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951685, 'name': PowerOffVM_Task, 'duration_secs': 0.410495} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.052019] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2080.052019] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2080.052019] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07faddaa-282e-431d-bd6a-e71b81ce6592 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.083890] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2080.084280] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2080.084505] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Deleting the datastore file [datastore1] c5541241-84e2-4216-b6f9-4c716f29d759 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2080.084697] env[63024]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-431bbb91-ba4b-42ba-bd54-0d18768d36e2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.097506] env[63024]: DEBUG oslo_vmware.api [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for the task: (returnval){ [ 2080.097506] env[63024]: value = "task-1951689" [ 2080.097506] env[63024]: _type = "Task" [ 2080.097506] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.109119] env[63024]: DEBUG oslo_vmware.api [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951689, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.151175] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.311513] env[63024]: DEBUG oslo_concurrency.lockutils [None req-fb7841a6-414c-498d-b9c6-f9dd94a3e267 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 22.767s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2080.324089] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951686, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.462763] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c6de5c8-9717-41f4-bd33-291fa81fcae9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.471543] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1475d5d4-4099-4a35-9a91-c7435554b540 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.512745] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cef70da-2192-43e2-ad25-7865ad211ece {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.524402] env[63024]: INFO nova.compute.manager [-] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Took 2.64 seconds to deallocate network for instance. [ 2080.524950] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951687, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071786} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.531294] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2080.535583] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-795eabd7-94c3-4bbe-a469-d394146f4c7e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.544031] env[63024]: DEBUG nova.network.neutron [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Successfully updated port: 60d617ea-e957-4bd3-839b-2036b3433064 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2080.544031] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12badd3d-8c53-42d1-b1b8-50f1d7229b0a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.566519] env[63024]: DEBUG nova.compute.provider_tree [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2080.568213] env[63024]: DEBUG oslo_vmware.api [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951684, 'name': PowerOnVM_Task, 'duration_secs': 0.887074} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.578246] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2080.578517] env[63024]: INFO nova.compute.manager [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Took 8.96 seconds to spawn the instance on the hypervisor. 
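The PowerOnVM_Task sequence in the entries above (Invoking VirtualMachine.PowerOnVM_Task, "Waiting for the task", repeated "progress is N%" polls, then "completed successfully" with a duration_secs) is the standard oslo.vmware task pattern. A minimal sketch of that call pattern in Python follows; it is an illustration, not the Nova driver code, and the host, credentials and the vm_ref lookup are placeholders rather than values taken from this log:

from oslo_vmware import api as vmware_api

# Illustrative sketch only: host/credentials are placeholders, and vm_ref is
# assumed to be a VirtualMachine managed-object reference obtained elsewhere
# (e.g. via a PropertyCollector or SearchIndex query, as seen in the log).
session = vmware_api.VMwareAPISession(
    'vcenter.example.invalid', 'user', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

def power_on(session, vm_ref):
    # invoke_api() issues the SOAP request (the "Invoking
    # VirtualMachine.PowerOnVM_Task" lines) and returns a Task reference.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    # wait_for_task() polls the task ("progress is 0%" ... "progress is 66%")
    # and returns the completed task info, raising if vCenter reports an error.
    return session.wait_for_task(task)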
[ 2080.578768] env[63024]: DEBUG nova.compute.manager [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2080.589087] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 5192ad93-a4e9-4aa0-983d-186ab17360f0/5192ad93-a4e9-4aa0-983d-186ab17360f0.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2080.591243] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1617534e-aa8b-4a28-a4df-5620e40dd416 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.594694] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a03192a-8cf5-4538-a98c-af96185be6e8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.611124] env[63024]: DEBUG nova.compute.manager [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Received event network-vif-deleted-7eeea323-6f39-4e16-8603-b463434191f8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2080.611355] env[63024]: DEBUG nova.compute.manager [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Received event network-vif-deleted-05bc00c8-444d-425a-8c1e-0d34f269c7e8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2080.611529] env[63024]: INFO nova.compute.manager [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Neutron deleted interface 05bc00c8-444d-425a-8c1e-0d34f269c7e8; detaching it from the instance and deleting it from the info cache [ 2080.611690] env[63024]: DEBUG nova.network.neutron [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.615863] env[63024]: DEBUG nova.network.neutron [-] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.628735] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2080.628735] env[63024]: value = "task-1951690" [ 2080.628735] env[63024]: _type = "Task" [ 2080.628735] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.632942] env[63024]: DEBUG oslo_vmware.api [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Task: {'id': task-1951689, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.12707} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.635974] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2080.636188] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2080.636371] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2080.636546] env[63024]: INFO nova.compute.manager [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2080.636778] env[63024]: DEBUG oslo.service.loopingcall [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2080.637417] env[63024]: DEBUG nova.compute.manager [-] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2080.637503] env[63024]: DEBUG nova.network.neutron [-] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2080.647363] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951690, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.653108] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2080.669519] env[63024]: DEBUG nova.network.neutron [-] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2080.710326] env[63024]: DEBUG nova.network.neutron [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Updated VIF entry in instance network info cache for port a75a35bb-1971-4617-9a1a-5750c7485384. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2080.710690] env[63024]: DEBUG nova.network.neutron [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Updating instance_info_cache with network_info: [{"id": "a75a35bb-1971-4617-9a1a-5750c7485384", "address": "fa:16:3e:41:bd:60", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa75a35bb-19", "ovs_interfaceid": "a75a35bb-1971-4617-9a1a-5750c7485384", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2080.825651] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951686, 'name': CreateVM_Task, 'duration_secs': 0.798395} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2080.825651] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2080.826338] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2080.826508] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2080.826837] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2080.827121] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee7983e7-fa52-4886-97ac-23fc274acc0b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2080.833631] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2080.833631] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52896fc4-48cd-02f1-772e-94e74b5b6649" [ 2080.833631] env[63024]: _type = "Task" [ 2080.833631] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2080.842793] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52896fc4-48cd-02f1-772e-94e74b5b6649, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.051561] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "refresh_cache-2aa624cb-b36a-43c9-8407-37383f196563" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2081.051717] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquired lock "refresh_cache-2aa624cb-b36a-43c9-8407-37383f196563" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2081.051913] env[63024]: DEBUG nova.network.neutron [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2081.057064] env[63024]: DEBUG oslo_concurrency.lockutils [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.090701] env[63024]: DEBUG nova.scheduler.client.report [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2081.118036] env[63024]: INFO nova.compute.manager [-] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Took 1.33 seconds to deallocate network for instance. [ 2081.119938] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6757bddb-24cc-4570-88f7-0211c24e27e9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.143088] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0a1b7fb-7043-465f-a5cc-4c4d09355133 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.157102] env[63024]: INFO nova.compute.manager [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Took 38.94 seconds to build instance. 
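The "Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b" entry above carries the placement inventory for this compute node. As a minimal sketch (not Nova or Placement code), the schedulable capacity implied by such a record can be derived with the usual capacity = (total - reserved) * allocation_ratio rule:

# Values copied from the inventory logged above; the derivation itself is an
# illustration, not output from this run.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for resource_class, record in inventory.items():
    capacity = (record['total'] - record['reserved']) * record['allocation_ratio']
    print(f"{resource_class}: {capacity:g} schedulable units")
# Prints: VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400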
[ 2081.171707] env[63024]: DEBUG nova.network.neutron [-] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2081.173069] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.177037] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951690, 'name': ReconfigVM_Task, 'duration_secs': 0.468728} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.185734] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 5192ad93-a4e9-4aa0-983d-186ab17360f0/5192ad93-a4e9-4aa0-983d-186ab17360f0.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2081.187527] env[63024]: DEBUG nova.compute.manager [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Detach interface failed, port_id=05bc00c8-444d-425a-8c1e-0d34f269c7e8, reason: Instance 92d1f96e-bbe7-4654-9d3a-47ba40057157 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2081.187896] env[63024]: DEBUG nova.compute.manager [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Received event network-vif-plugged-60d617ea-e957-4bd3-839b-2036b3433064 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2081.187977] env[63024]: DEBUG oslo_concurrency.lockutils [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] Acquiring lock "2aa624cb-b36a-43c9-8407-37383f196563-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.188144] env[63024]: DEBUG oslo_concurrency.lockutils [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] Lock "2aa624cb-b36a-43c9-8407-37383f196563-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.188315] env[63024]: DEBUG oslo_concurrency.lockutils [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] Lock "2aa624cb-b36a-43c9-8407-37383f196563-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.188482] env[63024]: DEBUG nova.compute.manager [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] No waiting events found dispatching network-vif-plugged-60d617ea-e957-4bd3-839b-2036b3433064 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2081.188647] env[63024]: WARNING nova.compute.manager [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Received unexpected event network-vif-plugged-60d617ea-e957-4bd3-839b-2036b3433064 for instance with vm_state building and task_state spawning. [ 2081.188811] env[63024]: DEBUG nova.compute.manager [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Received event network-changed-60d617ea-e957-4bd3-839b-2036b3433064 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2081.188969] env[63024]: DEBUG nova.compute.manager [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Refreshing instance network info cache due to event network-changed-60d617ea-e957-4bd3-839b-2036b3433064. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2081.189155] env[63024]: DEBUG oslo_concurrency.lockutils [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] Acquiring lock "refresh_cache-2aa624cb-b36a-43c9-8407-37383f196563" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2081.189434] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ddc0d1f3-4ec9-40c1-88c4-df0ac68fcaf4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.198708] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2081.198708] env[63024]: value = "task-1951691" [ 2081.198708] env[63024]: _type = "Task" [ 2081.198708] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.209019] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951691, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.213399] env[63024]: DEBUG oslo_concurrency.lockutils [req-64ea47fd-89f0-45da-a31f-a67181e9e203 req-d2740c2d-9674-4c54-b786-b9c56950b030 service nova] Releasing lock "refresh_cache-881b1f35-206e-4c3f-bf7a-d1774a9343c2" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2081.344546] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52896fc4-48cd-02f1-772e-94e74b5b6649, 'name': SearchDatastore_Task, 'duration_secs': 0.035847} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.344871] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2081.345127] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2081.345483] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2081.345597] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2081.345780] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2081.346078] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8d2dc360-00d6-4f08-a59a-7cf86c372c72 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.357610] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2081.357813] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2081.358802] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-75d6254b-45ed-4219-a4e8-ddc83e59b3e9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.364656] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2081.364656] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52be7207-8871-488e-02eb-2e38a8a0468b" [ 2081.364656] env[63024]: _type = "Task" [ 2081.364656] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.373190] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52be7207-8871-488e-02eb-2e38a8a0468b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.598897] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.410s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.599959] env[63024]: DEBUG nova.compute.manager [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2081.602586] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.471s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.603829] env[63024]: INFO nova.compute.claims [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2081.618895] env[63024]: DEBUG nova.network.neutron [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2081.629949] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.663467] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f239f82e-c2e4-4c14-8902-3c51c8a9873f tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "0d253199-adf8-45c0-a6bf-b11c12b08688" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.454s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.671094] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.674350] env[63024]: INFO nova.compute.manager [-] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Took 1.04 seconds to deallocate network for instance. [ 2081.711670] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951691, 'name': Rename_Task, 'duration_secs': 0.145038} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.711957] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2081.712246] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-100cc50c-20fe-43bb-9b55-9998c399c65b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.722845] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2081.722845] env[63024]: value = "task-1951692" [ 2081.722845] env[63024]: _type = "Task" [ 2081.722845] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.732717] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951692, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2081.820424] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.820718] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.820942] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2081.821528] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2081.821528] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2081.824757] env[63024]: INFO nova.compute.manager [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Terminating instance [ 2081.876778] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52be7207-8871-488e-02eb-2e38a8a0468b, 'name': SearchDatastore_Task, 'duration_secs': 0.052384} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2081.877628] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b4c5498-8fb5-44ff-aca7-146219323fbd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2081.880730] env[63024]: DEBUG nova.network.neutron [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Updating instance_info_cache with network_info: [{"id": "60d617ea-e957-4bd3-839b-2036b3433064", "address": "fa:16:3e:d5:0e:aa", "network": {"id": "2f5cbe03-419b-4995-837c-3389d94c2be3", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1864944898-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7fc70d467714e59b3c171a308feafdf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60d617ea-e9", "ovs_interfaceid": "60d617ea-e957-4bd3-839b-2036b3433064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2081.885750] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2081.885750] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52469096-e56d-ca99-0a7b-0fb6fddeee1f" [ 2081.885750] env[63024]: _type = "Task" [ 2081.885750] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2081.896258] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52469096-e56d-ca99-0a7b-0fb6fddeee1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.109107] env[63024]: DEBUG nova.compute.utils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2082.113305] env[63024]: DEBUG nova.compute.manager [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2082.113515] env[63024]: DEBUG nova.network.neutron [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2082.168955] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.182473] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2082.184233] env[63024]: DEBUG nova.policy [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '893bfe0d8eef423aae6c7eb5cdc1a9e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18540818b60e4483963d14559bc5c38d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2082.237303] env[63024]: DEBUG oslo_vmware.api [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951692, 'name': PowerOnVM_Task, 'duration_secs': 0.4908} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.237303] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2082.237303] env[63024]: INFO nova.compute.manager [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Took 8.29 seconds to spawn the instance on the hypervisor. 
[ 2082.237303] env[63024]: DEBUG nova.compute.manager [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2082.237303] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2651e290-da0c-4633-b528-547f23992440 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.330857] env[63024]: DEBUG nova.compute.manager [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2082.331236] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2082.333500] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046de403-10e6-40cb-8686-2422a28b1d40 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.347349] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2082.348341] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fde84314-6b09-4bc4-82e0-c89e7fd5678a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.359902] env[63024]: DEBUG oslo_vmware.api [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2082.359902] env[63024]: value = "task-1951693" [ 2082.359902] env[63024]: _type = "Task" [ 2082.359902] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.372236] env[63024]: DEBUG oslo_vmware.api [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951693, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.384067] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Releasing lock "refresh_cache-2aa624cb-b36a-43c9-8407-37383f196563" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2082.384067] env[63024]: DEBUG nova.compute.manager [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Instance network_info: |[{"id": "60d617ea-e957-4bd3-839b-2036b3433064", "address": "fa:16:3e:d5:0e:aa", "network": {"id": "2f5cbe03-419b-4995-837c-3389d94c2be3", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1864944898-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7fc70d467714e59b3c171a308feafdf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60d617ea-e9", "ovs_interfaceid": "60d617ea-e957-4bd3-839b-2036b3433064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2082.384337] env[63024]: DEBUG oslo_concurrency.lockutils [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] Acquired lock "refresh_cache-2aa624cb-b36a-43c9-8407-37383f196563" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2082.388028] env[63024]: DEBUG nova.network.neutron [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Refreshing network info cache for port 60d617ea-e957-4bd3-839b-2036b3433064 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2082.388028] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:0e:aa', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0d2101e-2d93-4310-a242-af2d9ecdaf9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '60d617ea-e957-4bd3-839b-2036b3433064', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2082.395650] env[63024]: DEBUG oslo.service.loopingcall [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 
tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2082.397031] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2082.401615] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-030fe939-795a-4173-baf8-3de3ff6ac135 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.435366] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52469096-e56d-ca99-0a7b-0fb6fddeee1f, 'name': SearchDatastore_Task, 'duration_secs': 0.012832} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.436715] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2082.437020] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 881b1f35-206e-4c3f-bf7a-d1774a9343c2/881b1f35-206e-4c3f-bf7a-d1774a9343c2.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2082.437337] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2082.437337] env[63024]: value = "task-1951694" [ 2082.437337] env[63024]: _type = "Task" [ 2082.437337] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.437636] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-efc14a97-0eb8-4890-853f-ba197eb6ddf6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.454291] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951694, 'name': CreateVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.454546] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2082.454546] env[63024]: value = "task-1951695" [ 2082.454546] env[63024]: _type = "Task" [ 2082.454546] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.463989] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951695, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.614930] env[63024]: DEBUG nova.compute.manager [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2082.620355] env[63024]: DEBUG nova.network.neutron [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Successfully created port: 91603922-dbf0-46ad-86ff-46990886344a {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2082.642433] env[63024]: DEBUG nova.compute.manager [req-fe4b5535-88c1-40fe-8d90-9569433ccdd0 req-c6f15aa7-41a4-4933-b191-df392621bcf0 service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Received event network-changed-5d2fbce5-45d5-4e27-bc41-6f77e528f245 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2082.642710] env[63024]: DEBUG nova.compute.manager [req-fe4b5535-88c1-40fe-8d90-9569433ccdd0 req-c6f15aa7-41a4-4933-b191-df392621bcf0 service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Refreshing instance network info cache due to event network-changed-5d2fbce5-45d5-4e27-bc41-6f77e528f245. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2082.642979] env[63024]: DEBUG oslo_concurrency.lockutils [req-fe4b5535-88c1-40fe-8d90-9569433ccdd0 req-c6f15aa7-41a4-4933-b191-df392621bcf0 service nova] Acquiring lock "refresh_cache-0d253199-adf8-45c0-a6bf-b11c12b08688" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2082.643149] env[63024]: DEBUG oslo_concurrency.lockutils [req-fe4b5535-88c1-40fe-8d90-9569433ccdd0 req-c6f15aa7-41a4-4933-b191-df392621bcf0 service nova] Acquired lock "refresh_cache-0d253199-adf8-45c0-a6bf-b11c12b08688" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2082.643722] env[63024]: DEBUG nova.network.neutron [req-fe4b5535-88c1-40fe-8d90-9569433ccdd0 req-c6f15aa7-41a4-4933-b191-df392621bcf0 service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Refreshing network info cache for port 5d2fbce5-45d5-4e27-bc41-6f77e528f245 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2082.676268] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.765630] env[63024]: INFO nova.compute.manager [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Took 32.61 seconds to build instance. [ 2082.873846] env[63024]: DEBUG oslo_vmware.api [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951693, 'name': PowerOffVM_Task, 'duration_secs': 0.237718} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.874151] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2082.874331] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2082.874612] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52ee33d2-7827-45e1-8d90-bba998638003 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.956911] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951694, 'name': CreateVM_Task, 'duration_secs': 0.473355} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2082.964064] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2082.967560] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2082.967848] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2082.968322] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2082.969188] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2082.969439] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2082.969625] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleting the datastore file [datastore1] 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2082.970433] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f6b84b-2431-4937-96eb-34d7dd7b5a89 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.973635] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4ca49e5-7fab-4fec-a1b3-8cf886cb93f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.975859] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50d834e9-c542-4606-897e-b3344134534d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.981022] env[63024]: DEBUG 
oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951695, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2082.986185] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dfc5adf-30e0-40f0-82e7-8ceec223d2f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2082.989412] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2082.989412] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e17e95-093d-4283-5e16-0cac4d971ee7" [ 2082.989412] env[63024]: _type = "Task" [ 2082.989412] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2082.990710] env[63024]: DEBUG oslo_vmware.api [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2082.990710] env[63024]: value = "task-1951697" [ 2082.990710] env[63024]: _type = "Task" [ 2082.990710] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.028142] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1959e6f-dbdb-4498-9fc9-bb9c0d735ad3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.036181] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e17e95-093d-4283-5e16-0cac4d971ee7, 'name': SearchDatastore_Task, 'duration_secs': 0.010067} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.036440] env[63024]: DEBUG oslo_vmware.api [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951697, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.037131] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2083.037407] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2083.037646] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2083.037792] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2083.037967] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2083.038256] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a3b42785-26c5-4ccd-9260-1ae3d4b7406b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.043710] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e6ca1f-94b0-41c9-9626-84d6deb26f69 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.057589] env[63024]: DEBUG nova.compute.provider_tree [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2083.062097] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2083.062097] env[63024]: DEBUG 
nova.virt.vmwareapi.vmops [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2083.062097] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddace39b-4dfc-48f7-b4b0-2c372dbbbe6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.068124] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2083.068124] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d4c0da-19d2-ec4c-a43d-99213d816d75" [ 2083.068124] env[63024]: _type = "Task" [ 2083.068124] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.078234] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d4c0da-19d2-ec4c-a43d-99213d816d75, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.171491] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task} progress is 18%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.265337] env[63024]: DEBUG oslo_concurrency.lockutils [None req-10553f6f-5644-410c-af03-48483c8a0473 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.119s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2083.469466] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951695, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.54681} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.469751] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 881b1f35-206e-4c3f-bf7a-d1774a9343c2/881b1f35-206e-4c3f-bf7a-d1774a9343c2.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2083.469964] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2083.470258] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8c5e5c9-15cd-49b6-8051-4eebd8334aae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.478945] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2083.478945] env[63024]: value = "task-1951699" [ 2083.478945] env[63024]: _type = "Task" [ 2083.478945] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.487947] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951699, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.511390] env[63024]: DEBUG oslo_vmware.api [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951697, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.178669} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.511665] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2083.511851] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2083.513096] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2083.513355] env[63024]: INFO nova.compute.manager [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Took 1.18 seconds to destroy the instance on the hypervisor. [ 2083.513636] env[63024]: DEBUG oslo.service.loopingcall [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2083.513846] env[63024]: DEBUG nova.compute.manager [-] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2083.513955] env[63024]: DEBUG nova.network.neutron [-] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2083.564347] env[63024]: DEBUG nova.scheduler.client.report [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2083.581585] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d4c0da-19d2-ec4c-a43d-99213d816d75, 'name': SearchDatastore_Task, 'duration_secs': 0.010276} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.582990] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0543edbe-310b-4d5a-887f-cf5993130659 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.589630] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2083.589630] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b07dee-cdb7-35bb-b3a2-e25bd55d8183" [ 2083.589630] env[63024]: _type = "Task" [ 2083.589630] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2083.598475] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b07dee-cdb7-35bb-b3a2-e25bd55d8183, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2083.632233] env[63024]: DEBUG nova.compute.manager [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2083.642225] env[63024]: DEBUG nova.network.neutron [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Updated VIF entry in instance network info cache for port 60d617ea-e957-4bd3-839b-2036b3433064. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2083.643785] env[63024]: DEBUG nova.network.neutron [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Updating instance_info_cache with network_info: [{"id": "60d617ea-e957-4bd3-839b-2036b3433064", "address": "fa:16:3e:d5:0e:aa", "network": {"id": "2f5cbe03-419b-4995-837c-3389d94c2be3", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-1864944898-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7fc70d467714e59b3c171a308feafdf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap60d617ea-e9", "ovs_interfaceid": "60d617ea-e957-4bd3-839b-2036b3433064", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2083.673587] env[63024]: DEBUG oslo_vmware.api [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951677, 'name': ReconfigVM_Task, 'duration_secs': 5.82514} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.676067] env[63024]: DEBUG nova.virt.hardware [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2083.676298] env[63024]: DEBUG nova.virt.hardware [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2083.676493] env[63024]: DEBUG nova.virt.hardware [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2083.676629] env[63024]: DEBUG nova.virt.hardware [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2083.676776] env[63024]: DEBUG nova.virt.hardware [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2083.676923] env[63024]: DEBUG nova.virt.hardware [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2083.677274] env[63024]: DEBUG nova.virt.hardware [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2083.677630] env[63024]: DEBUG nova.virt.hardware [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2083.677823] env[63024]: DEBUG nova.virt.hardware [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 
tempest-ServersTestJSON-2007614233-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2083.678027] env[63024]: DEBUG nova.virt.hardware [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2083.678177] env[63024]: DEBUG nova.virt.hardware [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2083.678469] env[63024]: DEBUG oslo_concurrency.lockutils [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2083.678659] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Reconfigured VM to detach interface {{(pid=63024) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2083.682490] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eefb713-d5a7-4f31-b0c1-3e311f24dca6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.691867] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d020a848-2ebe-45a3-b781-12cc0f4b8ce5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2083.866406] env[63024]: DEBUG nova.network.neutron [req-fe4b5535-88c1-40fe-8d90-9569433ccdd0 req-c6f15aa7-41a4-4933-b191-df392621bcf0 service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Updated VIF entry in instance network info cache for port 5d2fbce5-45d5-4e27-bc41-6f77e528f245. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2083.866813] env[63024]: DEBUG nova.network.neutron [req-fe4b5535-88c1-40fe-8d90-9569433ccdd0 req-c6f15aa7-41a4-4933-b191-df392621bcf0 service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Updating instance_info_cache with network_info: [{"id": "5d2fbce5-45d5-4e27-bc41-6f77e528f245", "address": "fa:16:3e:91:c6:2f", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5d2fbce5-45", "ovs_interfaceid": "5d2fbce5-45d5-4e27-bc41-6f77e528f245", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2083.988767] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951699, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070136} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2083.989140] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2083.989924] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038e05e1-f87d-4951-a556-87a7ecc31230 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.014018] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] 881b1f35-206e-4c3f-bf7a-d1774a9343c2/881b1f35-206e-4c3f-bf7a-d1774a9343c2.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2084.014018] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0b646040-4ca4-47e4-88bf-9211a6e773af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.034233] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2084.034233] env[63024]: value = "task-1951700" [ 2084.034233] env[63024]: _type = "Task" [ 2084.034233] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.043574] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951700, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.076019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.471s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2084.076019] env[63024]: DEBUG nova.compute.manager [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2084.079386] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.325s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2084.089022] env[63024]: INFO nova.compute.claims [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2084.106919] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b07dee-cdb7-35bb-b3a2-e25bd55d8183, 'name': SearchDatastore_Task, 'duration_secs': 0.044828} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2084.106919] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2084.109896] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 2aa624cb-b36a-43c9-8407-37383f196563/2aa624cb-b36a-43c9-8407-37383f196563.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2084.109896] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d5744e76-a4f7-42c8-91d0-e3943df49c41 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2084.116943] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2084.116943] env[63024]: value = "task-1951701" [ 2084.116943] env[63024]: _type = "Task" [ 2084.116943] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2084.128144] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951701, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.147680] env[63024]: DEBUG oslo_concurrency.lockutils [req-a75657d4-34b6-47f1-a380-59aa99f42321 req-2af449e8-b3dd-4adc-8c01-10abc4fc3875 service nova] Releasing lock "refresh_cache-2aa624cb-b36a-43c9-8407-37383f196563" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2084.241610] env[63024]: DEBUG nova.compute.manager [req-6ef87085-3973-4c06-9bc7-861c6019dbc6 req-439cfead-4263-45db-836f-f453a0d2c057 service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Received event network-changed-38ea7e67-f8df-4441-93e8-2983babd9f62 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2084.241851] env[63024]: DEBUG nova.compute.manager [req-6ef87085-3973-4c06-9bc7-861c6019dbc6 req-439cfead-4263-45db-836f-f453a0d2c057 service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Refreshing instance network info cache due to event network-changed-38ea7e67-f8df-4441-93e8-2983babd9f62. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2084.242084] env[63024]: DEBUG oslo_concurrency.lockutils [req-6ef87085-3973-4c06-9bc7-861c6019dbc6 req-439cfead-4263-45db-836f-f453a0d2c057 service nova] Acquiring lock "refresh_cache-5192ad93-a4e9-4aa0-983d-186ab17360f0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2084.242238] env[63024]: DEBUG oslo_concurrency.lockutils [req-6ef87085-3973-4c06-9bc7-861c6019dbc6 req-439cfead-4263-45db-836f-f453a0d2c057 service nova] Acquired lock "refresh_cache-5192ad93-a4e9-4aa0-983d-186ab17360f0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2084.242400] env[63024]: DEBUG nova.network.neutron [req-6ef87085-3973-4c06-9bc7-861c6019dbc6 req-439cfead-4263-45db-836f-f453a0d2c057 service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Refreshing network info cache for port 38ea7e67-f8df-4441-93e8-2983babd9f62 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2084.369918] env[63024]: DEBUG oslo_concurrency.lockutils [req-fe4b5535-88c1-40fe-8d90-9569433ccdd0 req-c6f15aa7-41a4-4933-b191-df392621bcf0 service nova] Releasing lock "refresh_cache-0d253199-adf8-45c0-a6bf-b11c12b08688" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2084.549852] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951700, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.579833] env[63024]: DEBUG nova.compute.utils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2084.589434] env[63024]: DEBUG nova.compute.manager [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2084.589844] env[63024]: DEBUG nova.network.neutron [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2084.636114] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951701, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2084.740853] env[63024]: DEBUG nova.policy [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '42318303a9494635a6e67b85e89fc27d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cb7c7cd6c854c49835a36ed244d7abc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2085.026568] env[63024]: DEBUG nova.network.neutron [req-6ef87085-3973-4c06-9bc7-861c6019dbc6 req-439cfead-4263-45db-836f-f453a0d2c057 service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Updated VIF entry in instance network info cache for port 38ea7e67-f8df-4441-93e8-2983babd9f62. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2085.026950] env[63024]: DEBUG nova.network.neutron [req-6ef87085-3973-4c06-9bc7-861c6019dbc6 req-439cfead-4263-45db-836f-f453a0d2c057 service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Updating instance_info_cache with network_info: [{"id": "38ea7e67-f8df-4441-93e8-2983babd9f62", "address": "fa:16:3e:2f:ad:7e", "network": {"id": "83ed1c04-a2e0-4c15-ae35-68e988607ce4", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-470202335-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.232", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "5dcb1fcc9fd945cb9f4477fe1cce3f5b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap38ea7e67-f8", "ovs_interfaceid": "38ea7e67-f8df-4441-93e8-2983babd9f62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2085.046017] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951700, 'name': ReconfigVM_Task, 'duration_secs': 0.579896} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.046347] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Reconfigured VM instance instance-00000069 to attach disk [datastore1] 881b1f35-206e-4c3f-bf7a-d1774a9343c2/881b1f35-206e-4c3f-bf7a-d1774a9343c2.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2085.047065] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e5501373-cee7-4a17-a2db-79f2cac6cdf5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.055723] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2085.055723] env[63024]: value = "task-1951702" [ 2085.055723] env[63024]: _type = "Task" [ 2085.055723] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.065685] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951702, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.068349] env[63024]: DEBUG nova.compute.manager [req-3b561fc3-6e7a-4fe8-b22d-ef3fc0ad056f req-27495ff3-281e-434e-b385-d97cbd06291e service nova] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Received event network-vif-plugged-91603922-dbf0-46ad-86ff-46990886344a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2085.068600] env[63024]: DEBUG oslo_concurrency.lockutils [req-3b561fc3-6e7a-4fe8-b22d-ef3fc0ad056f req-27495ff3-281e-434e-b385-d97cbd06291e service nova] Acquiring lock "1666cff0-59bd-41a0-aa3c-d1e8fac3a49a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2085.068835] env[63024]: DEBUG oslo_concurrency.lockutils [req-3b561fc3-6e7a-4fe8-b22d-ef3fc0ad056f req-27495ff3-281e-434e-b385-d97cbd06291e service nova] Lock "1666cff0-59bd-41a0-aa3c-d1e8fac3a49a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2085.069016] env[63024]: DEBUG oslo_concurrency.lockutils [req-3b561fc3-6e7a-4fe8-b22d-ef3fc0ad056f req-27495ff3-281e-434e-b385-d97cbd06291e service nova] Lock "1666cff0-59bd-41a0-aa3c-d1e8fac3a49a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2085.069194] env[63024]: DEBUG nova.compute.manager [req-3b561fc3-6e7a-4fe8-b22d-ef3fc0ad056f req-27495ff3-281e-434e-b385-d97cbd06291e service nova] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] No waiting events found dispatching network-vif-plugged-91603922-dbf0-46ad-86ff-46990886344a {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2085.069363] env[63024]: WARNING nova.compute.manager [req-3b561fc3-6e7a-4fe8-b22d-ef3fc0ad056f req-27495ff3-281e-434e-b385-d97cbd06291e service nova] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Received unexpected event network-vif-plugged-91603922-dbf0-46ad-86ff-46990886344a for instance with vm_state building and task_state spawning. [ 2085.087049] env[63024]: DEBUG nova.compute.manager [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2085.132734] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951701, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.776538} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.133032] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 2aa624cb-b36a-43c9-8407-37383f196563/2aa624cb-b36a-43c9-8407-37383f196563.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2085.133362] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2085.133724] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0ebefc18-6c05-4737-abfc-1ed2ff67cef4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.144226] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2085.144226] env[63024]: value = "task-1951703" [ 2085.144226] env[63024]: _type = "Task" [ 2085.144226] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.158758] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951703, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.167687] env[63024]: DEBUG nova.network.neutron [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Successfully updated port: 91603922-dbf0-46ad-86ff-46990886344a {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2085.259125] env[63024]: DEBUG nova.compute.manager [req-5278c8ce-4dcb-427f-b630-221a62873b89 req-cf3f5f30-3b8f-491b-9175-3c8ddc68119d service nova] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Received event network-changed-91603922-dbf0-46ad-86ff-46990886344a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2085.259125] env[63024]: DEBUG nova.compute.manager [req-5278c8ce-4dcb-427f-b630-221a62873b89 req-cf3f5f30-3b8f-491b-9175-3c8ddc68119d service nova] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Refreshing instance network info cache due to event network-changed-91603922-dbf0-46ad-86ff-46990886344a. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2085.259125] env[63024]: DEBUG oslo_concurrency.lockutils [req-5278c8ce-4dcb-427f-b630-221a62873b89 req-cf3f5f30-3b8f-491b-9175-3c8ddc68119d service nova] Acquiring lock "refresh_cache-1666cff0-59bd-41a0-aa3c-d1e8fac3a49a" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2085.259125] env[63024]: DEBUG oslo_concurrency.lockutils [req-5278c8ce-4dcb-427f-b630-221a62873b89 req-cf3f5f30-3b8f-491b-9175-3c8ddc68119d service nova] Acquired lock "refresh_cache-1666cff0-59bd-41a0-aa3c-d1e8fac3a49a" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2085.259125] env[63024]: DEBUG nova.network.neutron [req-5278c8ce-4dcb-427f-b630-221a62873b89 req-cf3f5f30-3b8f-491b-9175-3c8ddc68119d service nova] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Refreshing network info cache for port 91603922-dbf0-46ad-86ff-46990886344a {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2085.403583] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1741b29-87a6-4fa6-b873-7752f346b201 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.415212] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50fc3163-a70d-4575-9e9f-9230e57c0032 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.449303] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c407c0cc-c7c5-440e-bd5e-0015b3c24bd3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.459416] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5443a744-b3aa-4680-9a60-b3e1fffd6f65 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.475464] env[63024]: DEBUG nova.compute.provider_tree [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2085.481341] env[63024]: DEBUG nova.network.neutron [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Successfully created port: 1ca53ff5-e854-46d6-ad35-04dc9c98d396 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2085.529894] env[63024]: DEBUG oslo_concurrency.lockutils [req-6ef87085-3973-4c06-9bc7-861c6019dbc6 req-439cfead-4263-45db-836f-f453a0d2c057 service nova] Releasing lock "refresh_cache-5192ad93-a4e9-4aa0-983d-186ab17360f0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2085.567634] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951702, 'name': Rename_Task, 'duration_secs': 
0.330808} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.567634] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2085.567634] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2a9440f5-e0bb-4bb4-8a19-915473329587 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.573938] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2085.573938] env[63024]: value = "task-1951704" [ 2085.573938] env[63024]: _type = "Task" [ 2085.573938] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.583175] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951704, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.656323] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951703, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068369} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2085.656595] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2085.657550] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a0b212-2226-4ff7-96d7-01db38e2e7d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.673591] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "refresh_cache-1666cff0-59bd-41a0-aa3c-d1e8fac3a49a" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2085.682151] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] 2aa624cb-b36a-43c9-8407-37383f196563/2aa624cb-b36a-43c9-8407-37383f196563.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2085.682759] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e53f18e-6e4c-4d83-8ffe-98862b4c5fb9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.703931] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2085.703931] env[63024]: value = "task-1951705" [ 2085.703931] env[63024]: _type = "Task" [ 2085.703931] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.712567] env[63024]: DEBUG nova.network.neutron [-] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2085.727573] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951705, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2085.802670] env[63024]: DEBUG nova.network.neutron [req-5278c8ce-4dcb-427f-b630-221a62873b89 req-cf3f5f30-3b8f-491b-9175-3c8ddc68119d service nova] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2085.896209] env[63024]: DEBUG nova.network.neutron [req-5278c8ce-4dcb-427f-b630-221a62873b89 req-cf3f5f30-3b8f-491b-9175-3c8ddc68119d service nova] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2085.981025] env[63024]: DEBUG nova.scheduler.client.report [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2086.087713] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951704, 'name': PowerOnVM_Task} progress is 87%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.101019] env[63024]: DEBUG nova.compute.manager [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2086.134017] env[63024]: DEBUG nova.virt.hardware [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2086.134017] env[63024]: DEBUG nova.virt.hardware [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2086.134017] env[63024]: DEBUG nova.virt.hardware [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2086.134295] env[63024]: DEBUG nova.virt.hardware [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2086.134587] env[63024]: DEBUG nova.virt.hardware [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2086.134873] env[63024]: DEBUG nova.virt.hardware [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2086.135220] env[63024]: DEBUG nova.virt.hardware [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2086.135512] env[63024]: DEBUG nova.virt.hardware [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2086.138018] env[63024]: DEBUG nova.virt.hardware [None 
req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2086.138018] env[63024]: DEBUG nova.virt.hardware [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2086.138018] env[63024]: DEBUG nova.virt.hardware [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2086.138018] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14025375-975b-4daa-a501-dfe7d104c34b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.147341] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d0fc54-9b36-4dce-85d4-ba4bcaba0c38 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.215491] env[63024]: DEBUG oslo_concurrency.lockutils [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2086.215714] env[63024]: DEBUG oslo_concurrency.lockutils [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2086.215900] env[63024]: DEBUG nova.network.neutron [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2086.217802] env[63024]: INFO nova.compute.manager [-] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Took 2.70 seconds to deallocate network for instance. [ 2086.218218] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951705, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.398845] env[63024]: DEBUG oslo_concurrency.lockutils [req-5278c8ce-4dcb-427f-b630-221a62873b89 req-cf3f5f30-3b8f-491b-9175-3c8ddc68119d service nova] Releasing lock "refresh_cache-1666cff0-59bd-41a0-aa3c-d1e8fac3a49a" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2086.399267] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "refresh_cache-1666cff0-59bd-41a0-aa3c-d1e8fac3a49a" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2086.399434] env[63024]: DEBUG nova.network.neutron [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2086.485243] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2086.485896] env[63024]: DEBUG nova.compute.manager [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2086.488722] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.206s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2086.488905] env[63024]: DEBUG nova.objects.instance [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63024) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2086.586238] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951704, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.720602] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951705, 'name': ReconfigVM_Task, 'duration_secs': 0.894443} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.720923] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Reconfigured VM instance instance-0000006a to attach disk [datastore1] 2aa624cb-b36a-43c9-8407-37383f196563/2aa624cb-b36a-43c9-8407-37383f196563.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2086.721981] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-82d8b11a-f214-46bb-97f0-22549c56132f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.725495] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2086.729539] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2086.729539] env[63024]: value = "task-1951707" [ 2086.729539] env[63024]: _type = "Task" [ 2086.729539] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.739122] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951707, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.993741] env[63024]: DEBUG nova.network.neutron [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2086.997328] env[63024]: DEBUG nova.compute.utils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2087.007092] env[63024]: DEBUG nova.compute.manager [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2087.007092] env[63024]: DEBUG nova.network.neutron [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2087.087116] env[63024]: DEBUG oslo_vmware.api [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951704, 'name': PowerOnVM_Task, 'duration_secs': 1.093471} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.087420] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2087.087659] env[63024]: INFO nova.compute.manager [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Took 10.74 seconds to spawn the instance on the hypervisor. [ 2087.087805] env[63024]: DEBUG nova.compute.manager [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2087.088620] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ddd103-3a6e-4ea8-9029-ec196df7b90d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.122927] env[63024]: DEBUG nova.compute.manager [req-735f98d2-1605-41a9-947b-2c1c6645c584 req-b6ae49b6-8d2c-43a5-8fa9-6d7bcf84c558 service nova] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Received event network-vif-plugged-1ca53ff5-e854-46d6-ad35-04dc9c98d396 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2087.123402] env[63024]: DEBUG oslo_concurrency.lockutils [req-735f98d2-1605-41a9-947b-2c1c6645c584 req-b6ae49b6-8d2c-43a5-8fa9-6d7bcf84c558 service nova] Acquiring lock "e4d6e79b-f110-44c2-8201-926b57eeb68d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2087.123486] env[63024]: DEBUG oslo_concurrency.lockutils [req-735f98d2-1605-41a9-947b-2c1c6645c584 req-b6ae49b6-8d2c-43a5-8fa9-6d7bcf84c558 service nova] Lock "e4d6e79b-f110-44c2-8201-926b57eeb68d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.123604] env[63024]: DEBUG oslo_concurrency.lockutils [req-735f98d2-1605-41a9-947b-2c1c6645c584 req-b6ae49b6-8d2c-43a5-8fa9-6d7bcf84c558 service nova] Lock "e4d6e79b-f110-44c2-8201-926b57eeb68d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: 
held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.123775] env[63024]: DEBUG nova.compute.manager [req-735f98d2-1605-41a9-947b-2c1c6645c584 req-b6ae49b6-8d2c-43a5-8fa9-6d7bcf84c558 service nova] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] No waiting events found dispatching network-vif-plugged-1ca53ff5-e854-46d6-ad35-04dc9c98d396 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2087.123936] env[63024]: WARNING nova.compute.manager [req-735f98d2-1605-41a9-947b-2c1c6645c584 req-b6ae49b6-8d2c-43a5-8fa9-6d7bcf84c558 service nova] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Received unexpected event network-vif-plugged-1ca53ff5-e854-46d6-ad35-04dc9c98d396 for instance with vm_state building and task_state spawning. [ 2087.201367] env[63024]: DEBUG nova.policy [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27151e89c8ee4ddd9285bff3795a82b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e53c02ad56640dc8cbc8839669b67bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2087.240917] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951707, 'name': Rename_Task, 'duration_secs': 0.271251} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2087.241211] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2087.241459] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2988e00c-3fe6-4935-8a88-573c7e0d52ca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.249545] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2087.249545] env[63024]: value = "task-1951708" [ 2087.249545] env[63024]: _type = "Task" [ 2087.249545] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2087.261350] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951708, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.506853] env[63024]: DEBUG nova.compute.manager [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2087.514499] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1237f708-1207-4203-ac71-9feefaa89278 tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.026s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2087.515474] env[63024]: DEBUG oslo_concurrency.lockutils [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.462s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2087.515696] env[63024]: DEBUG nova.objects.instance [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lazy-loading 'resources' on Instance uuid 3cf2726c-2551-4bbd-8032-006062cdcc39 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2087.562163] env[63024]: DEBUG nova.network.neutron [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Successfully updated port: 1ca53ff5-e854-46d6-ad35-04dc9c98d396 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2087.611116] env[63024]: INFO nova.compute.manager [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Took 34.74 seconds to build instance. [ 2087.629441] env[63024]: DEBUG nova.compute.manager [req-64250a71-4b62-40ad-b117-7617df812270 req-6794fa7b-3c14-44fe-bf2b-bcdc2c1e25d2 service nova] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Received event network-vif-deleted-6e0e9732-b318-4b20-ad72-8c2bc07eaf34 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2087.645034] env[63024]: INFO nova.network.neutron [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Port b0190de6-3c0b-430e-9952-40bdf36d8b58 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 2087.645034] env[63024]: DEBUG nova.network.neutron [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updating instance_info_cache with network_info: [{"id": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "address": "fa:16:3e:8b:4e:6d", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041c096f-ef", "ovs_interfaceid": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2087.762765] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951708, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2087.787861] env[63024]: DEBUG nova.network.neutron [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Updating instance_info_cache with network_info: [{"id": "91603922-dbf0-46ad-86ff-46990886344a", "address": "fa:16:3e:3d:e2:67", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91603922-db", "ovs_interfaceid": "91603922-dbf0-46ad-86ff-46990886344a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.072720] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Acquiring lock "refresh_cache-e4d6e79b-f110-44c2-8201-926b57eeb68d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2088.072720] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Acquired lock "refresh_cache-e4d6e79b-f110-44c2-8201-926b57eeb68d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2088.072720] env[63024]: DEBUG nova.network.neutron [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2088.114186] env[63024]: DEBUG oslo_concurrency.lockutils [None req-89b83903-c5f6-490c-b997-25c00fad81c7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.256s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.147145] env[63024]: DEBUG oslo_concurrency.lockutils [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2088.186729] env[63024]: DEBUG nova.network.neutron [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Successfully created port: f5649c3a-8bab-4abb-a1a2-1d88b780eba2 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2088.264318] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951708, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.277455] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5fcf9ff-f514-4fcd-8ab2-bde03539f7cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.285768] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4397b3e3-daf6-4193-b522-9d911f89331d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.290678] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "refresh_cache-1666cff0-59bd-41a0-aa3c-d1e8fac3a49a" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2088.290979] env[63024]: DEBUG nova.compute.manager [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Instance network_info: |[{"id": "91603922-dbf0-46ad-86ff-46990886344a", "address": "fa:16:3e:3d:e2:67", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91603922-db", "ovs_interfaceid": "91603922-dbf0-46ad-86ff-46990886344a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2088.291489] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3d:e2:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec763be6-4041-4651-8fd7-3820cf0ab86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91603922-dbf0-46ad-86ff-46990886344a', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2088.299344] env[63024]: DEBUG oslo.service.loopingcall [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2088.324952] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2088.325437] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7196be1c-52db-41ed-a002-a7721790608c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.339952] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe810089-b1c8-4bf5-98c6-7c812d24bca5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.348921] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08da1dc6-cb8d-4fa2-92d6-61d5890268e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.353996] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2088.353996] env[63024]: value = "task-1951710" [ 2088.353996] env[63024]: _type = "Task" [ 2088.353996] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.366833] env[63024]: DEBUG nova.compute.provider_tree [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2088.374412] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951710, 'name': CreateVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.522708] env[63024]: DEBUG nova.compute.manager [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2088.549701] env[63024]: DEBUG nova.virt.hardware [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2088.549959] env[63024]: DEBUG nova.virt.hardware [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2088.550144] env[63024]: DEBUG nova.virt.hardware [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2088.550334] env[63024]: DEBUG nova.virt.hardware [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2088.550482] env[63024]: DEBUG nova.virt.hardware [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2088.550632] env[63024]: DEBUG nova.virt.hardware [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2088.550838] env[63024]: DEBUG nova.virt.hardware [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2088.550995] env[63024]: DEBUG nova.virt.hardware [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2088.551204] env[63024]: DEBUG 
nova.virt.hardware [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2088.551387] env[63024]: DEBUG nova.virt.hardware [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2088.551562] env[63024]: DEBUG nova.virt.hardware [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2088.552461] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f73025e-f7ac-419b-9fbd-e225c25bdafa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.561559] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f51cac-5d4a-4f24-b603-34698d190184 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.614085] env[63024]: DEBUG nova.network.neutron [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2088.656974] env[63024]: DEBUG oslo_concurrency.lockutils [None req-66d44e09-efec-4ca1-84ee-66480935d1c3 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-34e4db8e-e0d9-4a27-9368-c5e711b51a29-b0190de6-3c0b-430e-9952-40bdf36d8b58" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 11.654s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2088.765057] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951708, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.792792] env[63024]: DEBUG nova.network.neutron [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Updating instance_info_cache with network_info: [{"id": "1ca53ff5-e854-46d6-ad35-04dc9c98d396", "address": "fa:16:3e:cb:53:a4", "network": {"id": "465c96ac-d4df-4c96-9ce2-b0e2a83919fb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-182175607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cb7c7cd6c854c49835a36ed244d7abc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002", "external-id": "nsx-vlan-transportzone-506", "segmentation_id": 506, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca53ff5-e8", "ovs_interfaceid": "1ca53ff5-e854-46d6-ad35-04dc9c98d396", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2088.864763] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951710, 'name': CreateVM_Task, 'duration_secs': 0.423061} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.864926] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2088.865605] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2088.865776] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2088.866106] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2088.866355] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b4c012c-aab9-43a8-a327-1a612c1f22e5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.869811] env[63024]: DEBUG nova.scheduler.client.report [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2088.875942] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2088.875942] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528dec96-96bc-e852-049e-3ce4584da1c4" [ 2088.875942] env[63024]: _type = "Task" [ 2088.875942] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.884498] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528dec96-96bc-e852-049e-3ce4584da1c4, 'name': SearchDatastore_Task, 'duration_secs': 0.009461} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2088.885305] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2088.885538] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2088.885768] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2088.885918] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2088.886114] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2088.886587] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-523a73f4-cfb1-4b18-8baa-f59157fd3884 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.895147] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2088.895329] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2088.896045] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29f63e8f-70ee-4fd0-be12-3464d9ef2a38 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2088.901693] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2088.901693] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a5be78-5206-d2ec-6b7a-ad9606a2f820" [ 2088.901693] env[63024]: _type = "Task" [ 2088.901693] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2088.909689] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a5be78-5206-d2ec-6b7a-ad9606a2f820, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2088.963066] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2088.963394] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.265478] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951708, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.295315] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Releasing lock "refresh_cache-e4d6e79b-f110-44c2-8201-926b57eeb68d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2089.295685] env[63024]: DEBUG nova.compute.manager [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Instance network_info: |[{"id": "1ca53ff5-e854-46d6-ad35-04dc9c98d396", "address": "fa:16:3e:cb:53:a4", "network": {"id": "465c96ac-d4df-4c96-9ce2-b0e2a83919fb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-182175607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cb7c7cd6c854c49835a36ed244d7abc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002", "external-id": "nsx-vlan-transportzone-506", "segmentation_id": 506, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca53ff5-e8", "ovs_interfaceid": "1ca53ff5-e854-46d6-ad35-04dc9c98d396", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2089.296234] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cb:53:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1ca53ff5-e854-46d6-ad35-04dc9c98d396', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2089.304014] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Creating folder: Project (3cb7c7cd6c854c49835a36ed244d7abc). Parent ref: group-v401959. 
{{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2089.304678] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f995993d-7bfc-4e69-8d88-41b87c6569ea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.316816] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Created folder: Project (3cb7c7cd6c854c49835a36ed244d7abc) in parent group-v401959. [ 2089.317018] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Creating folder: Instances. Parent ref: group-v402242. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2089.317254] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac64966d-2031-49a4-9bdd-b73ce797efca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.329167] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Created folder: Instances in parent group-v402242. [ 2089.329408] env[63024]: DEBUG oslo.service.loopingcall [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2089.329601] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2089.329807] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f201befd-820d-444d-a94c-1044b5657942 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.349364] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2089.349364] env[63024]: value = "task-1951713" [ 2089.349364] env[63024]: _type = "Task" [ 2089.349364] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.359622] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951713, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.377275] env[63024]: DEBUG oslo_concurrency.lockutils [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.862s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.380950] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.750s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2089.381319] env[63024]: DEBUG nova.objects.instance [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lazy-loading 'resources' on Instance uuid 92d1f96e-bbe7-4654-9d3a-47ba40057157 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2089.401419] env[63024]: INFO nova.scheduler.client.report [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Deleted allocations for instance 3cf2726c-2551-4bbd-8032-006062cdcc39 [ 2089.417210] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a5be78-5206-d2ec-6b7a-ad9606a2f820, 'name': SearchDatastore_Task, 'duration_secs': 0.008738} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2089.417970] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5860f6a8-a2a8-4d44-8a33-a2bf0abe0b6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.424201] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2089.424201] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c4b4ed-4c46-cb09-8f5c-21802760e876" [ 2089.424201] env[63024]: _type = "Task" [ 2089.424201] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.436863] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c4b4ed-4c46-cb09-8f5c-21802760e876, 'name': SearchDatastore_Task, 'duration_secs': 0.009829} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2089.436863] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2089.437019] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a/1666cff0-59bd-41a0-aa3c-d1e8fac3a49a.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2089.437208] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba1b4b68-7b03-4b5e-80ae-00444e4f2248 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.445740] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2089.445740] env[63024]: value = "task-1951714" [ 2089.445740] env[63024]: _type = "Task" [ 2089.445740] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.455596] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951714, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.466625] env[63024]: DEBUG nova.compute.utils [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2089.679319] env[63024]: DEBUG nova.compute.manager [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Received event network-changed-1ca53ff5-e854-46d6-ad35-04dc9c98d396 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2089.679604] env[63024]: DEBUG nova.compute.manager [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Refreshing instance network info cache due to event network-changed-1ca53ff5-e854-46d6-ad35-04dc9c98d396. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2089.679898] env[63024]: DEBUG oslo_concurrency.lockutils [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] Acquiring lock "refresh_cache-e4d6e79b-f110-44c2-8201-926b57eeb68d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2089.680112] env[63024]: DEBUG oslo_concurrency.lockutils [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] Acquired lock "refresh_cache-e4d6e79b-f110-44c2-8201-926b57eeb68d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2089.680340] env[63024]: DEBUG nova.network.neutron [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Refreshing network info cache for port 1ca53ff5-e854-46d6-ad35-04dc9c98d396 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2089.720168] env[63024]: DEBUG nova.network.neutron [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Successfully updated port: f5649c3a-8bab-4abb-a1a2-1d88b780eba2 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2089.766914] env[63024]: DEBUG oslo_vmware.api [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951708, 'name': PowerOnVM_Task, 'duration_secs': 2.032629} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2089.767239] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2089.767439] env[63024]: INFO nova.compute.manager [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Took 11.07 seconds to spawn the instance on the hypervisor. [ 2089.767626] env[63024]: DEBUG nova.compute.manager [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2089.768522] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b41a76-2b25-426a-ae0b-09d0995a9441 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.862041] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951713, 'name': CreateVM_Task} progress is 25%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2089.913180] env[63024]: DEBUG oslo_concurrency.lockutils [None req-070e8c49-7aa2-4298-b2e6-44d775335cd0 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "3cf2726c-2551-4bbd-8032-006062cdcc39" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.778s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.959578] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951714, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508097} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2089.959858] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a/1666cff0-59bd-41a0-aa3c-d1e8fac3a49a.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2089.960086] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2089.960364] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0b5cb805-8d91-4866-8f6c-7fd5f8744195 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2089.968962] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.005s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2089.972080] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2089.972080] env[63024]: value = "task-1951715" [ 2089.972080] env[63024]: _type = "Task" [ 2089.972080] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2089.983435] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951715, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.109817] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b872acc4-26c2-4024-9bd6-8948fad5032a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.119465] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5e91ea-795f-4236-87ed-88ac63e2dc52 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.154290] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa3012b-64c5-425d-889d-9411ded8c224 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.162072] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da96cb4-b4d3-45f5-ae29-0728fc7b4a15 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.176059] env[63024]: DEBUG nova.compute.provider_tree [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2090.224136] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2090.224405] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2090.224601] env[63024]: DEBUG nova.network.neutron [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2090.290627] env[63024]: INFO nova.compute.manager [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Took 32.84 seconds to build instance. [ 2090.361409] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951713, 'name': CreateVM_Task, 'duration_secs': 0.699941} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.361638] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2090.362279] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2090.362446] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2090.362770] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2090.363038] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ee0a91d4-694d-45e8-b4ac-0a03c6f8db71 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.370978] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Waiting for the task: (returnval){ [ 2090.370978] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527d1319-195e-5eb2-c23f-3b89de8a8419" [ 2090.370978] env[63024]: _type = "Task" [ 2090.370978] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.381776] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]527d1319-195e-5eb2-c23f-3b89de8a8419, 'name': SearchDatastore_Task, 'duration_secs': 0.010281} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.381776] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2090.381776] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2090.382019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2090.382083] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2090.383411] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2090.383411] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dd255b67-28fc-4534-9044-8d18e221e6ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.392206] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2090.392404] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2090.393140] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0ffc87d-0fb3-4405-9dc5-92795f64471e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.399635] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Waiting for the task: (returnval){ [ 2090.399635] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cca850-3750-795b-9729-5576a92a1ce5" [ 2090.399635] env[63024]: _type = "Task" [ 2090.399635] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.410040] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cca850-3750-795b-9729-5576a92a1ce5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.452905] env[63024]: DEBUG nova.network.neutron [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Updated VIF entry in instance network info cache for port 1ca53ff5-e854-46d6-ad35-04dc9c98d396. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2090.453441] env[63024]: DEBUG nova.network.neutron [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Updating instance_info_cache with network_info: [{"id": "1ca53ff5-e854-46d6-ad35-04dc9c98d396", "address": "fa:16:3e:cb:53:a4", "network": {"id": "465c96ac-d4df-4c96-9ce2-b0e2a83919fb", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-182175607-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cb7c7cd6c854c49835a36ed244d7abc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e49a26b5-7b6b-41fd-8bed-4cd9a6c1a002", "external-id": "nsx-vlan-transportzone-506", "segmentation_id": 506, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1ca53ff5-e8", "ovs_interfaceid": "1ca53ff5-e854-46d6-ad35-04dc9c98d396", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2090.482345] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951715, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086027} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.482623] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2090.483473] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2b11cc-7607-4d52-b223-6cef4a49f26c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.505824] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a/1666cff0-59bd-41a0-aa3c-d1e8fac3a49a.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2090.506437] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b4a7dcc-7e43-4f9d-9c38-7a020849aa43 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.527768] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2090.527768] env[63024]: value = "task-1951717" [ 2090.527768] env[63024]: _type = "Task" [ 2090.527768] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.544137] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951717, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.696842] env[63024]: ERROR nova.scheduler.client.report [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] [req-1ab89f54-a506-45da-9153-cd4b2d9ec705] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1ab89f54-a506-45da-9153-cd4b2d9ec705"}]} [ 2090.713454] env[63024]: DEBUG nova.scheduler.client.report [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 2090.727894] env[63024]: DEBUG nova.scheduler.client.report [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 2090.728143] env[63024]: DEBUG nova.compute.provider_tree [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2090.743363] env[63024]: DEBUG nova.scheduler.client.report [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 2090.762086] env[63024]: DEBUG nova.scheduler.client.report [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 2090.794429] env[63024]: DEBUG nova.network.neutron [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2090.795721] env[63024]: DEBUG oslo_concurrency.lockutils [None req-242eaeac-9a14-428f-b399-f3d72e9a8bc4 tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "2aa624cb-b36a-43c9-8407-37383f196563" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.360s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2090.917030] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52cca850-3750-795b-9729-5576a92a1ce5, 'name': SearchDatastore_Task, 'duration_secs': 0.011163} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2090.917868] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ccf47233-cd03-4414-9107-a4f2df103236 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.927165] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Waiting for the task: (returnval){ [ 2090.927165] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525de6fb-a310-a17e-22b0-e718f6a9b022" [ 2090.927165] env[63024]: _type = "Task" [ 2090.927165] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2090.935964] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525de6fb-a310-a17e-22b0-e718f6a9b022, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2090.956279] env[63024]: DEBUG oslo_concurrency.lockutils [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] Releasing lock "refresh_cache-e4d6e79b-f110-44c2-8201-926b57eeb68d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2090.956783] env[63024]: DEBUG nova.compute.manager [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Received event network-changed-041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2090.956783] env[63024]: DEBUG nova.compute.manager [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Refreshing instance network info cache due to event network-changed-041c096f-ef1b-49ad-aadb-469b89fe4f25. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2090.957079] env[63024]: DEBUG oslo_concurrency.lockutils [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] Acquiring lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2090.958042] env[63024]: DEBUG oslo_concurrency.lockutils [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] Acquired lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2090.958042] env[63024]: DEBUG nova.network.neutron [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Refreshing network info cache for port 041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2090.978796] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4811e373-b8d5-4089-b939-28408ad3903c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2090.987241] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1c6cd9-357c-4020-9950-9403c0f49dd1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.019566] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff749cf8-4d2e-4633-998d-4cae83e5d7f9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.025928] env[63024]: DEBUG nova.network.neutron [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance_info_cache with network_info: [{"id": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "address": "fa:16:3e:a9:0a:72", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5649c3a-8b", "ovs_interfaceid": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2091.029777] env[63024]: DEBUG oslo_vmware.service 
[-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c1218c-6e7b-4815-a9bb-a6fd7dbadeea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.042066] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2091.042321] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.042547] env[63024]: INFO nova.compute.manager [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Attaching volume 6a38fd49-11fa-49bd-8905-34d2940181fb to /dev/sdb [ 2091.052999] env[63024]: DEBUG nova.compute.provider_tree [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2091.061051] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951717, 'name': ReconfigVM_Task, 'duration_secs': 0.334409} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.061051] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a/1666cff0-59bd-41a0-aa3c-d1e8fac3a49a.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2091.061051] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1a2fcd95-e688-407e-856b-c88c5a93868d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.070035] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2091.070035] env[63024]: value = "task-1951718" [ 2091.070035] env[63024]: _type = "Task" [ 2091.070035] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.077510] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951718, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.084637] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3251f6a-b781-438e-beb6-bea499ec3814 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.091593] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c37ff9a-6c26-4c58-acfc-0adc53de17d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.107091] env[63024]: DEBUG nova.virt.block_device [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Updating existing volume attachment record: 9db4a815-c520-4b9d-ac36-d90d0d348625 {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2091.127099] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "2aa624cb-b36a-43c9-8407-37383f196563" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2091.127401] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "2aa624cb-b36a-43c9-8407-37383f196563" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.127727] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "2aa624cb-b36a-43c9-8407-37383f196563-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2091.127996] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "2aa624cb-b36a-43c9-8407-37383f196563-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.128270] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "2aa624cb-b36a-43c9-8407-37383f196563-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2091.131712] env[63024]: INFO nova.compute.manager [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Terminating instance [ 2091.438883] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]525de6fb-a310-a17e-22b0-e718f6a9b022, 'name': SearchDatastore_Task, 'duration_secs': 0.010482} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.439156] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2091.439416] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e4d6e79b-f110-44c2-8201-926b57eeb68d/e4d6e79b-f110-44c2-8201-926b57eeb68d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2091.439681] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b98106a7-fd7a-4b03-a871-383dfba72fdb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.447828] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Waiting for the task: (returnval){ [ 2091.447828] env[63024]: value = "task-1951722" [ 2091.447828] env[63024]: _type = "Task" [ 2091.447828] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.456647] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951722, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.529105] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2091.529474] env[63024]: DEBUG nova.compute.manager [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Instance network_info: |[{"id": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "address": "fa:16:3e:a9:0a:72", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5649c3a-8b", "ovs_interfaceid": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2091.529989] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:0a:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '09bf081b-cdf0-4977-abe2-2339a87409ab', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f5649c3a-8bab-4abb-a1a2-1d88b780eba2', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2091.540724] env[63024]: DEBUG oslo.service.loopingcall [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2091.541020] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2091.545106] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-28cd79b5-ad37-4e31-bc6c-c2605ecf5ba2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.571716] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2091.571716] env[63024]: value = "task-1951723" [ 2091.571716] env[63024]: _type = "Task" [ 2091.571716] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.587543] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951723, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.587865] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951718, 'name': Rename_Task, 'duration_secs': 0.141899} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2091.588209] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2091.588475] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-85c71066-ba9d-4479-a34d-6e474e1b2c91 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.594722] env[63024]: DEBUG nova.scheduler.client.report [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 160 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2091.594975] env[63024]: DEBUG nova.compute.provider_tree [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 160 to 161 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2091.595181] env[63024]: DEBUG nova.compute.provider_tree [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Updating 
inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 169, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2091.600227] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2091.600227] env[63024]: value = "task-1951724" [ 2091.600227] env[63024]: _type = "Task" [ 2091.600227] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.611482] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951724, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.637161] env[63024]: DEBUG nova.compute.manager [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2091.637616] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2091.640966] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a3bd283-ab81-4549-a47d-6eaa070e2077 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.653459] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2091.653825] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-db285007-895e-4ea4-9a8c-e86955f56046 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2091.663321] env[63024]: DEBUG oslo_vmware.api [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2091.663321] env[63024]: value = "task-1951725" [ 2091.663321] env[63024]: _type = "Task" [ 2091.663321] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2091.677229] env[63024]: DEBUG oslo_vmware.api [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951725, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2091.704523] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "ea24d375-ba88-42ca-a07e-52000ec613c0" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2091.704992] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2091.709234] env[63024]: DEBUG nova.compute.manager [req-47039f41-8f22-4872-9d83-7c19f7ab2eb4 req-6a1b2ffe-b725-49dc-85b6-16da2d579069 service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Received event network-changed-f5649c3a-8bab-4abb-a1a2-1d88b780eba2 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2091.709386] env[63024]: DEBUG nova.compute.manager [req-47039f41-8f22-4872-9d83-7c19f7ab2eb4 req-6a1b2ffe-b725-49dc-85b6-16da2d579069 service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Refreshing instance network info cache due to event network-changed-f5649c3a-8bab-4abb-a1a2-1d88b780eba2. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2091.709614] env[63024]: DEBUG oslo_concurrency.lockutils [req-47039f41-8f22-4872-9d83-7c19f7ab2eb4 req-6a1b2ffe-b725-49dc-85b6-16da2d579069 service nova] Acquiring lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2091.709834] env[63024]: DEBUG oslo_concurrency.lockutils [req-47039f41-8f22-4872-9d83-7c19f7ab2eb4 req-6a1b2ffe-b725-49dc-85b6-16da2d579069 service nova] Acquired lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2091.710037] env[63024]: DEBUG nova.network.neutron [req-47039f41-8f22-4872-9d83-7c19f7ab2eb4 req-6a1b2ffe-b725-49dc-85b6-16da2d579069 service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Refreshing network info cache for port f5649c3a-8bab-4abb-a1a2-1d88b780eba2 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2091.819614] env[63024]: DEBUG nova.network.neutron [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updated VIF entry in instance network info cache for port 041c096f-ef1b-49ad-aadb-469b89fe4f25. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2091.820075] env[63024]: DEBUG nova.network.neutron [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updating instance_info_cache with network_info: [{"id": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "address": "fa:16:3e:8b:4e:6d", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap041c096f-ef", "ovs_interfaceid": "041c096f-ef1b-49ad-aadb-469b89fe4f25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2091.958730] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951722, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.088102] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951723, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.101566] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.722s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.104044] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.922s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2092.104305] env[63024]: DEBUG nova.objects.instance [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lazy-loading 'resources' on Instance uuid c5541241-84e2-4216-b6f9-4c716f29d759 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2092.114442] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951724, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.128295] env[63024]: INFO nova.scheduler.client.report [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Deleted allocations for instance 92d1f96e-bbe7-4654-9d3a-47ba40057157 [ 2092.174331] env[63024]: DEBUG oslo_vmware.api [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951725, 'name': PowerOffVM_Task, 'duration_secs': 0.441417} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.174495] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2092.174591] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2092.174949] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab61b626-175a-456a-8844-a931eb5d3619 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.212996] env[63024]: INFO nova.compute.manager [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Detaching volume 5739f499-39f5-4d5e-8a12-f4f608420e01 [ 2092.254204] env[63024]: INFO nova.virt.block_device [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Attempting to driver detach volume 5739f499-39f5-4d5e-8a12-f4f608420e01 from mountpoint /dev/sdb [ 2092.254474] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2092.254988] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402210', 'volume_id': '5739f499-39f5-4d5e-8a12-f4f608420e01', 'name': 'volume-5739f499-39f5-4d5e-8a12-f4f608420e01', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ea24d375-ba88-42ca-a07e-52000ec613c0', 'attached_at': '', 'detached_at': '', 'volume_id': '5739f499-39f5-4d5e-8a12-f4f608420e01', 'serial': '5739f499-39f5-4d5e-8a12-f4f608420e01'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2092.255582] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f267b1b-b41a-4648-86ce-40bc9e343793 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.279408] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b815f117-3250-4e3c-b82b-35c960bc3b08 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.288672] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8e2791e-7702-4eac-bc55-6f8232772e24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.308479] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd7e32f-c7e5-4182-8e8b-2d55fbfacd0a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.325785] env[63024]: DEBUG oslo_concurrency.lockutils [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] Releasing lock "refresh_cache-34e4db8e-e0d9-4a27-9368-c5e711b51a29" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2092.326077] env[63024]: DEBUG nova.compute.manager [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Received event network-changed-340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2092.326253] env[63024]: DEBUG nova.compute.manager [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Refreshing instance network info cache due to event network-changed-340baee8-fd68-482a-94ce-82df41470c62. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2092.326468] env[63024]: DEBUG oslo_concurrency.lockutils [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] Acquiring lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2092.326609] env[63024]: DEBUG oslo_concurrency.lockutils [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] Acquired lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.326770] env[63024]: DEBUG nova.network.neutron [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Refreshing network info cache for port 340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2092.328024] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] The volume has not been displaced from its original location: [datastore1] volume-5739f499-39f5-4d5e-8a12-f4f608420e01/volume-5739f499-39f5-4d5e-8a12-f4f608420e01.vmdk. No consolidation needed. {{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2092.333207] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Reconfiguring VM instance instance-00000055 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2092.336061] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d18f937d-8eea-4691-97a5-d6ee2424eca6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.356292] env[63024]: DEBUG oslo_vmware.api [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2092.356292] env[63024]: value = "task-1951728" [ 2092.356292] env[63024]: _type = "Task" [ 2092.356292] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.368102] env[63024]: DEBUG oslo_vmware.api [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951728, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.462557] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951722, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.51898} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.462854] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e4d6e79b-f110-44c2-8201-926b57eeb68d/e4d6e79b-f110-44c2-8201-926b57eeb68d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2092.463116] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2092.463408] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ed47a425-db66-4f0d-9cd2-ded02e189343 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.471067] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Waiting for the task: (returnval){ [ 2092.471067] env[63024]: value = "task-1951729" [ 2092.471067] env[63024]: _type = "Task" [ 2092.471067] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.479356] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951729, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.569695] env[63024]: DEBUG nova.network.neutron [req-47039f41-8f22-4872-9d83-7c19f7ab2eb4 req-6a1b2ffe-b725-49dc-85b6-16da2d579069 service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updated VIF entry in instance network info cache for port f5649c3a-8bab-4abb-a1a2-1d88b780eba2. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2092.570331] env[63024]: DEBUG nova.network.neutron [req-47039f41-8f22-4872-9d83-7c19f7ab2eb4 req-6a1b2ffe-b725-49dc-85b6-16da2d579069 service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance_info_cache with network_info: [{"id": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "address": "fa:16:3e:a9:0a:72", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5649c3a-8b", "ovs_interfaceid": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2092.587710] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951723, 'name': CreateVM_Task, 'duration_secs': 0.603396} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.587710] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2092.588555] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2092.588770] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2092.589148] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2092.589700] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f41b248d-cf07-45f8-a445-4d1deabf078c {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.595875] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2092.595875] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dfbe78-ace5-81d4-3074-be140f46b04d" [ 2092.595875] env[63024]: _type = "Task" [ 2092.595875] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.607960] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dfbe78-ace5-81d4-3074-be140f46b04d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.620930] env[63024]: DEBUG oslo_vmware.api [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951724, 'name': PowerOnVM_Task, 'duration_secs': 0.70935} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.621425] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2092.621509] env[63024]: INFO nova.compute.manager [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Took 8.99 seconds to spawn the instance on the hypervisor. 
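The SearchDatastore_Task, ReconfigVM_Task, CreateVM_Task and PowerOnVM_Task entries above all follow the same shape: the driver submits a vCenter task, then wait_for_task polls it, logging "progress is N%" until the task completes or fails. A minimal sketch of that polling pattern, assuming a hypothetical get_task_info() callable in place of the real vSphere TaskInfo lookup (this is an illustration, not the oslo.vmware implementation):

```python
import time

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it reaches a terminal state.

    get_task_info is a hypothetical callable returning a dict such as
    {'state': 'running', 'progress': 66}; it stands in for the real
    vSphere task-info call and is not an oslo.vmware API.
    """
    while True:
        info = get_task_info()
        state = info.get('state')
        if state == 'success':
            return info.get('result')
        if state == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # 'queued' or 'running': report progress and keep polling,
        # mirroring the "progress is N%" lines in the log above.
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
```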
[ 2092.621687] env[63024]: DEBUG nova.compute.manager [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2092.622595] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0981856e-e808-4709-8f31-2ed215881594 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.639409] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cfa53e02-76a5-4151-b762-d6b3070f505c tempest-ServersNegativeTestJSON-1566661995 tempest-ServersNegativeTestJSON-1566661995-project-member] Lock "92d1f96e-bbe7-4654-9d3a-47ba40057157" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.630s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2092.674619] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2092.674985] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2092.675315] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Deleting the datastore file [datastore1] 2aa624cb-b36a-43c9-8407-37383f196563 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2092.675927] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-29b4f80f-8997-4fa9-87ab-72a5410f134a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.687349] env[63024]: DEBUG oslo_vmware.api [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for the task: (returnval){ [ 2092.687349] env[63024]: value = "task-1951730" [ 2092.687349] env[63024]: _type = "Task" [ 2092.687349] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.703170] env[63024]: DEBUG oslo_vmware.api [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951730, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.834042] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33d7e959-f14f-4c6c-ac4f-d12c86c0e95d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.844141] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aea68927-b06e-4ef5-9a42-350b62f1b17c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.879182] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1696414c-8474-45f6-9af2-5716c8029b20 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.889957] env[63024]: DEBUG oslo_vmware.api [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951728, 'name': ReconfigVM_Task, 'duration_secs': 0.386737} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.890333] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Reconfigured VM instance instance-00000055 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2092.895762] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d18a365-4075-4527-8ce3-7837a2a5b6ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.899524] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c91faf2c-817d-4f0e-80dd-1ee19914b96a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.920807] env[63024]: DEBUG nova.compute.provider_tree [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2092.924214] env[63024]: DEBUG oslo_vmware.api [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2092.924214] env[63024]: value = "task-1951731" [ 2092.924214] env[63024]: _type = "Task" [ 2092.924214] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2092.933435] env[63024]: DEBUG oslo_vmware.api [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951731, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2092.982212] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951729, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086087} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2092.984796] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2092.985614] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0e04631-9c61-415a-b542-fba76b56e698 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.010310] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] e4d6e79b-f110-44c2-8201-926b57eeb68d/e4d6e79b-f110-44c2-8201-926b57eeb68d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2093.010611] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ce5a7af-a792-4aed-a43b-ebaa327ce5ea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.031799] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Waiting for the task: (returnval){ [ 2093.031799] env[63024]: value = "task-1951732" [ 2093.031799] env[63024]: _type = "Task" [ 2093.031799] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.040725] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951732, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.073916] env[63024]: DEBUG oslo_concurrency.lockutils [req-47039f41-8f22-4872-9d83-7c19f7ab2eb4 req-6a1b2ffe-b725-49dc-85b6-16da2d579069 service nova] Releasing lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2093.106725] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dfbe78-ace5-81d4-3074-be140f46b04d, 'name': SearchDatastore_Task, 'duration_secs': 0.017537} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.106909] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2093.107215] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2093.107731] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2093.107731] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2093.107731] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2093.108028] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4930f4d-36fe-4ed9-9f0e-99ca0586e86e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.129925] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2093.130189] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2093.130901] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57af91bd-06d8-4dae-a712-b7d5c3b1a07a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.140379] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2093.140379] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ba7959-1fa9-3bb2-e73d-b7bac57253c3" [ 2093.140379] env[63024]: _type = "Task" [ 2093.140379] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.148191] env[63024]: INFO nova.compute.manager [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Took 29.99 seconds to build instance. [ 2093.155320] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ba7959-1fa9-3bb2-e73d-b7bac57253c3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.199430] env[63024]: DEBUG oslo_vmware.api [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Task: {'id': task-1951730, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.34648} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.199624] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2093.199806] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2093.200077] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2093.200452] env[63024]: INFO nova.compute.manager [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Took 1.56 seconds to destroy the instance on the hypervisor. [ 2093.200581] env[63024]: DEBUG oslo.service.loopingcall [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2093.200793] env[63024]: DEBUG nova.compute.manager [-] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2093.200910] env[63024]: DEBUG nova.network.neutron [-] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2093.347577] env[63024]: DEBUG nova.network.neutron [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updated VIF entry in instance network info cache for port 340baee8-fd68-482a-94ce-82df41470c62. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2093.348108] env[63024]: DEBUG nova.network.neutron [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updating instance_info_cache with network_info: [{"id": "340baee8-fd68-482a-94ce-82df41470c62", "address": "fa:16:3e:0c:e1:92", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap340baee8-fd", "ovs_interfaceid": "340baee8-fd68-482a-94ce-82df41470c62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2093.435393] env[63024]: DEBUG oslo_vmware.api [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951731, 'name': ReconfigVM_Task, 'duration_secs': 0.195225} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.435736] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402210', 'volume_id': '5739f499-39f5-4d5e-8a12-f4f608420e01', 'name': 'volume-5739f499-39f5-4d5e-8a12-f4f608420e01', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'ea24d375-ba88-42ca-a07e-52000ec613c0', 'attached_at': '', 'detached_at': '', 'volume_id': '5739f499-39f5-4d5e-8a12-f4f608420e01', 'serial': '5739f499-39f5-4d5e-8a12-f4f608420e01'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2093.455653] env[63024]: DEBUG nova.scheduler.client.report [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 161 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2093.455937] env[63024]: DEBUG nova.compute.provider_tree [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 161 to 162 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2093.456144] env[63024]: DEBUG nova.compute.provider_tree [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2093.545015] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951732, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.651818] env[63024]: DEBUG oslo_concurrency.lockutils [None req-598893b3-10d6-496b-a5c4-dec8f294da63 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "1666cff0-59bd-41a0-aa3c-d1e8fac3a49a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.508s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2093.652223] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ba7959-1fa9-3bb2-e73d-b7bac57253c3, 'name': SearchDatastore_Task, 'duration_secs': 0.038059} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2093.652924] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3cd17be3-8dc3-46cd-bafb-104d15b36a91 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2093.658498] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2093.658498] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5208560d-63ad-0a3f-27f3-9992b341b304" [ 2093.658498] env[63024]: _type = "Task" [ 2093.658498] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2093.666661] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5208560d-63ad-0a3f-27f3-9992b341b304, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2093.850913] env[63024]: DEBUG oslo_concurrency.lockutils [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] Releasing lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2093.851268] env[63024]: DEBUG nova.compute.manager [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Received event network-vif-plugged-f5649c3a-8bab-4abb-a1a2-1d88b780eba2 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2093.851477] env[63024]: DEBUG oslo_concurrency.lockutils [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] Acquiring lock "cb038d54-b785-4930-b8a5-b309c5f4b58d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2093.851691] env[63024]: DEBUG oslo_concurrency.lockutils [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2093.851861] env[63024]: DEBUG oslo_concurrency.lockutils [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2093.852041] env[63024]: DEBUG nova.compute.manager [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] No waiting events found dispatching network-vif-plugged-f5649c3a-8bab-4abb-a1a2-1d88b780eba2 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2093.852221] env[63024]: WARNING nova.compute.manager [req-8b4ea7d7-d655-4c92-a350-8f2a20843995 req-1c2263d5-5a15-48ae-91de-c6a9236032c0 service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Received unexpected event network-vif-plugged-f5649c3a-8bab-4abb-a1a2-1d88b780eba2 for instance with vm_state building and task_state spawning. 
[ 2093.961170] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.857s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2093.964569] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.238s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2093.964569] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2093.985368] env[63024]: DEBUG nova.objects.instance [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lazy-loading 'flavor' on Instance uuid ea24d375-ba88-42ca-a07e-52000ec613c0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2093.987907] env[63024]: INFO nova.scheduler.client.report [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Deleted allocations for instance c5541241-84e2-4216-b6f9-4c716f29d759 [ 2093.993380] env[63024]: INFO nova.scheduler.client.report [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleted allocations for instance 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2 [ 2094.043370] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951732, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.119614] env[63024]: DEBUG nova.compute.manager [req-6087280b-ce50-4689-93a9-4d766d58e74e req-37008924-affe-4b76-9ae4-9d8b617c91e5 service nova] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Received event network-vif-deleted-60d617ea-e957-4bd3-839b-2036b3433064 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2094.119825] env[63024]: INFO nova.compute.manager [req-6087280b-ce50-4689-93a9-4d766d58e74e req-37008924-affe-4b76-9ae4-9d8b617c91e5 service nova] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Neutron deleted interface 60d617ea-e957-4bd3-839b-2036b3433064; detaching it from the instance and deleting it from the info cache [ 2094.120187] env[63024]: DEBUG nova.network.neutron [req-6087280b-ce50-4689-93a9-4d766d58e74e req-37008924-affe-4b76-9ae4-9d8b617c91e5 service nova] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.170198] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5208560d-63ad-0a3f-27f3-9992b341b304, 'name': SearchDatastore_Task, 'duration_secs': 0.029419} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2094.170465] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2094.170728] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] cb038d54-b785-4930-b8a5-b309c5f4b58d/cb038d54-b785-4930-b8a5-b309c5f4b58d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2094.170988] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8d060da-1d7d-4ba0-9b62-e214d0c6f5f6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.180179] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2094.180179] env[63024]: value = "task-1951734" [ 2094.180179] env[63024]: _type = "Task" [ 2094.180179] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2094.189726] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951734, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.234828] env[63024]: DEBUG nova.network.neutron [-] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2094.498486] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7c674c61-eb7f-40e8-868f-79e9a79fbcbf tempest-ServerShowV254Test-447667734 tempest-ServerShowV254Test-447667734-project-member] Lock "c5541241-84e2-4216-b6f9-4c716f29d759" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.789s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2094.502034] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5f8f5a55-4135-441d-ac19-73ea22cf7817 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "9716d592-32d1-4f1d-b42b-1c8a7d81d2f2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.681s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2094.546395] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951732, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.622859] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6f6b4296-f990-4b0e-a615-e3846d3437d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.639174] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09658ef-f38d-4511-91f3-62a02b0fb701 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2094.692029] env[63024]: DEBUG nova.compute.manager [req-6087280b-ce50-4689-93a9-4d766d58e74e req-37008924-affe-4b76-9ae4-9d8b617c91e5 service nova] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Detach interface failed, port_id=60d617ea-e957-4bd3-839b-2036b3433064, reason: Instance 2aa624cb-b36a-43c9-8407-37383f196563 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2094.701539] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951734, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2094.739177] env[63024]: INFO nova.compute.manager [-] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Took 1.54 seconds to deallocate network for instance. 
[ 2094.925282] env[63024]: DEBUG oslo_concurrency.lockutils [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "interface-8826c266-659c-46ad-bb02-aefdffab8699-b0190de6-3c0b-430e-9952-40bdf36d8b58" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.927990] env[63024]: DEBUG oslo_concurrency.lockutils [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-8826c266-659c-46ad-bb02-aefdffab8699-b0190de6-3c0b-430e-9952-40bdf36d8b58" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.927990] env[63024]: DEBUG nova.objects.instance [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'flavor' on Instance uuid 8826c266-659c-46ad-bb02-aefdffab8699 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2094.980227] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "1666cff0-59bd-41a0-aa3c-d1e8fac3a49a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.980321] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "1666cff0-59bd-41a0-aa3c-d1e8fac3a49a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.980527] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "1666cff0-59bd-41a0-aa3c-d1e8fac3a49a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2094.980705] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "1666cff0-59bd-41a0-aa3c-d1e8fac3a49a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2094.980872] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "1666cff0-59bd-41a0-aa3c-d1e8fac3a49a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2094.983057] env[63024]: INFO nova.compute.manager [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Terminating instance [ 2095.002020] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69d031ea-adc4-4758-8036-df5811676277 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.294s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2095.049335] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951732, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.201025] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951734, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.760125} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.201025] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] cb038d54-b785-4930-b8a5-b309c5f4b58d/cb038d54-b785-4930-b8a5-b309c5f4b58d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2095.201025] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2095.201025] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d5b079d0-7ea9-4802-a524-20d3a3925b84 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.208937] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2095.208937] env[63024]: value = "task-1951736" [ 2095.208937] env[63024]: _type = "Task" [ 2095.208937] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.220362] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951736, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.247396] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.247396] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2095.247396] env[63024]: DEBUG nova.objects.instance [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lazy-loading 'resources' on Instance uuid 2aa624cb-b36a-43c9-8407-37383f196563 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2095.487552] env[63024]: DEBUG nova.compute.manager [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2095.487865] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2095.489354] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7fe7540-8400-4787-8d26-d22a993a1b1e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.500745] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2095.503434] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5dcf64c4-23c4-4659-bc71-23a07582b7e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.510700] env[63024]: DEBUG oslo_vmware.api [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2095.510700] env[63024]: value = "task-1951737" [ 2095.510700] env[63024]: _type = "Task" [ 2095.510700] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.527478] env[63024]: DEBUG oslo_vmware.api [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951737, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.552456] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951732, 'name': ReconfigVM_Task, 'duration_secs': 2.080437} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.552868] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Reconfigured VM instance instance-0000006c to attach disk [datastore1] e4d6e79b-f110-44c2-8201-926b57eeb68d/e4d6e79b-f110-44c2-8201-926b57eeb68d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2095.553698] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a80ab821-2621-4347-a9ec-cad4dfaae0ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.563448] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Waiting for the task: (returnval){ [ 2095.563448] env[63024]: value = "task-1951738" [ 2095.563448] env[63024]: _type = "Task" [ 2095.563448] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.574456] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951738, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.627137] env[63024]: DEBUG nova.objects.instance [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'pci_requests' on Instance uuid 8826c266-659c-46ad-bb02-aefdffab8699 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2095.663866] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Volume attach. 
Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2095.664351] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402246', 'volume_id': '6a38fd49-11fa-49bd-8905-34d2940181fb', 'name': 'volume-6a38fd49-11fa-49bd-8905-34d2940181fb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '881b1f35-206e-4c3f-bf7a-d1774a9343c2', 'attached_at': '', 'detached_at': '', 'volume_id': '6a38fd49-11fa-49bd-8905-34d2940181fb', 'serial': '6a38fd49-11fa-49bd-8905-34d2940181fb'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2095.665682] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0385c38a-7103-452c-b5ff-dd0859b38914 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.686597] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3e01541-4fc2-40e3-a856-ffe2c8633170 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.717603] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] volume-6a38fd49-11fa-49bd-8905-34d2940181fb/volume-6a38fd49-11fa-49bd-8905-34d2940181fb.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2095.722695] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d18e586-d7c2-43d0-a1ec-5a68abcc3cd4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.739340] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "fa326fe2-c00e-4379-954a-9b3275328abc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2095.739626] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "fa326fe2-c00e-4379-954a-9b3275328abc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2095.747320] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951736, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093927} completed 
successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2095.748947] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2095.749417] env[63024]: DEBUG oslo_vmware.api [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2095.749417] env[63024]: value = "task-1951739" [ 2095.749417] env[63024]: _type = "Task" [ 2095.749417] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.750237] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9863d464-67cf-4402-83f5-4c229704f625 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.777867] env[63024]: DEBUG oslo_vmware.api [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951739, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.789165] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] cb038d54-b785-4930-b8a5-b309c5f4b58d/cb038d54-b785-4930-b8a5-b309c5f4b58d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2095.792552] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c21b63de-e2a4-4d64-af4a-752f5da29385 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.820326] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2095.820326] env[63024]: value = "task-1951740" [ 2095.820326] env[63024]: _type = "Task" [ 2095.820326] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2095.833461] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951740, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2095.970065] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c706ff6-a098-42a1-8250-a429ed47f47c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2095.978798] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2d27a43-864d-4bc1-8211-971a7c8bb228 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.014926] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e93102d-db8d-464c-9e6b-6cdf2386c3d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.023156] env[63024]: DEBUG oslo_vmware.api [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951737, 'name': PowerOffVM_Task, 'duration_secs': 0.250105} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.025364] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2096.025558] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2096.025891] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a69aee7d-1d45-4cb9-b55f-420342ffc776 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.028321] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d464c070-bfe1-4277-86d5-29f880c3165e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.043651] env[63024]: DEBUG nova.compute.provider_tree [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2096.075465] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951738, 'name': Rename_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.130827] env[63024]: DEBUG nova.objects.base [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Object Instance<8826c266-659c-46ad-bb02-aefdffab8699> lazy-loaded attributes: flavor,pci_requests {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2096.131084] env[63024]: DEBUG nova.network.neutron [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2096.198248] env[63024]: DEBUG nova.policy [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6fc84a6eed984429b26a693ce7b0876e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9521048e807c4ca2a6d2e74a72b829a3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2096.228176] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2096.228452] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2096.228730] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleting the datastore file [datastore1] 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2096.229093] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5958fdb7-397e-40a5-bb49-4ac74fd25112 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.237718] env[63024]: DEBUG oslo_vmware.api [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2096.237718] env[63024]: value = "task-1951742" [ 2096.237718] env[63024]: _type = "Task" [ 2096.237718] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.242890] env[63024]: DEBUG nova.compute.manager [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2096.248758] env[63024]: DEBUG oslo_vmware.api [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951742, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.263445] env[63024]: DEBUG oslo_vmware.api [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951739, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.316267] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "ea24d375-ba88-42ca-a07e-52000ec613c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2096.316267] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2096.316267] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "ea24d375-ba88-42ca-a07e-52000ec613c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2096.316267] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2096.316267] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2096.317195] env[63024]: INFO 
nova.compute.manager [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Terminating instance [ 2096.336991] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951740, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.547043] env[63024]: DEBUG nova.scheduler.client.report [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2096.577834] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951738, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.748390] env[63024]: DEBUG oslo_vmware.api [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951742, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.179124} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.748662] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2096.748849] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2096.749034] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2096.749216] env[63024]: INFO nova.compute.manager [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Took 1.26 seconds to destroy the instance on the hypervisor. 
[ 2096.749454] env[63024]: DEBUG oslo.service.loopingcall [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2096.751771] env[63024]: DEBUG nova.compute.manager [-] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2096.751877] env[63024]: DEBUG nova.network.neutron [-] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2096.764615] env[63024]: DEBUG oslo_vmware.api [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951739, 'name': ReconfigVM_Task, 'duration_secs': 0.830774} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.764931] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Reconfigured VM instance instance-00000069 to attach disk [datastore1] volume-6a38fd49-11fa-49bd-8905-34d2940181fb/volume-6a38fd49-11fa-49bd-8905-34d2940181fb.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2096.773510] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baff9ecc-a7c6-465c-9545-aef53b7360f2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.784783] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2096.802057] env[63024]: DEBUG oslo_vmware.api [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2096.802057] env[63024]: value = "task-1951744" [ 2096.802057] env[63024]: _type = "Task" [ 2096.802057] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.810626] env[63024]: DEBUG oslo_vmware.api [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951744, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.821460] env[63024]: DEBUG nova.compute.manager [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2096.821746] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2096.822632] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f2d588c-ef9c-4d3c-a047-9be7ac800ee4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.834974] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951740, 'name': ReconfigVM_Task, 'duration_secs': 0.722909} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2096.837562] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfigured VM instance instance-0000006d to attach disk [datastore1] cb038d54-b785-4930-b8a5-b309c5f4b58d/cb038d54-b785-4930-b8a5-b309c5f4b58d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2096.838265] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2096.838531] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-de33ed7c-8ff5-47f7-b644-41d36fcace8b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.840093] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a6323beb-6f29-47c6-98fb-b6d1fa8198ef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2096.847465] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2096.847465] env[63024]: value = "task-1951745" [ 2096.847465] env[63024]: _type = "Task" [ 2096.847465] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.848718] env[63024]: DEBUG oslo_vmware.api [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2096.848718] env[63024]: value = "task-1951746" [ 2096.848718] env[63024]: _type = "Task" [ 2096.848718] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2096.860776] env[63024]: DEBUG oslo_vmware.api [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951746, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2096.864626] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951745, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.052880] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.807s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.055465] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.271s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2097.057449] env[63024]: INFO nova.compute.claims [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2097.079639] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951738, 'name': Rename_Task, 'duration_secs': 1.220704} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.081088] env[63024]: INFO nova.scheduler.client.report [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Deleted allocations for instance 2aa624cb-b36a-43c9-8407-37383f196563 [ 2097.082297] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2097.085374] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1eb73c8e-b9ec-4233-a517-5ea7c847081b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.095083] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Waiting for the task: (returnval){ [ 2097.095083] env[63024]: value = "task-1951747" [ 2097.095083] env[63024]: _type = "Task" [ 2097.095083] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.104937] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951747, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.158669] env[63024]: DEBUG nova.compute.manager [req-7b0bf6c4-90cc-4636-b005-cf033ae6b797 req-fe496157-5fd3-41ac-823b-44a2ed7db048 service nova] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Received event network-vif-deleted-91603922-dbf0-46ad-86ff-46990886344a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2097.159093] env[63024]: INFO nova.compute.manager [req-7b0bf6c4-90cc-4636-b005-cf033ae6b797 req-fe496157-5fd3-41ac-823b-44a2ed7db048 service nova] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Neutron deleted interface 91603922-dbf0-46ad-86ff-46990886344a; detaching it from the instance and deleting it from the info cache [ 2097.159395] env[63024]: DEBUG nova.network.neutron [req-7b0bf6c4-90cc-4636-b005-cf033ae6b797 req-fe496157-5fd3-41ac-823b-44a2ed7db048 service nova] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2097.312326] env[63024]: DEBUG oslo_vmware.api [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951744, 'name': ReconfigVM_Task, 'duration_secs': 0.171167} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.312590] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402246', 'volume_id': '6a38fd49-11fa-49bd-8905-34d2940181fb', 'name': 'volume-6a38fd49-11fa-49bd-8905-34d2940181fb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '881b1f35-206e-4c3f-bf7a-d1774a9343c2', 'attached_at': '', 'detached_at': '', 'volume_id': '6a38fd49-11fa-49bd-8905-34d2940181fb', 'serial': '6a38fd49-11fa-49bd-8905-34d2940181fb'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2097.362017] env[63024]: DEBUG oslo_vmware.api [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951746, 'name': PowerOffVM_Task, 'duration_secs': 0.245553} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.365034] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2097.365234] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2097.365576] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951745, 'name': Rename_Task, 'duration_secs': 0.251314} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2097.365695] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0daba7d8-edd7-4b9d-8820-d1116f9a8fbf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.367199] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2097.367432] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0bb777fd-3c71-43df-a2a2-caf2c72ef15d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.375174] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2097.375174] env[63024]: value = "task-1951749" [ 2097.375174] env[63024]: _type = "Task" [ 2097.375174] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.384544] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951749, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.542244] env[63024]: DEBUG nova.network.neutron [-] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2097.591815] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d0d518a-61de-4d33-8149-46912ad13e0c tempest-ImagesOneServerNegativeTestJSON-464690006 tempest-ImagesOneServerNegativeTestJSON-464690006-project-member] Lock "2aa624cb-b36a-43c9-8407-37383f196563" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.464s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2097.608023] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951747, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.662820] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a181f395-c3c3-4338-a72e-c2f929958765 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.672797] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73e72be1-9cb4-48e6-ab0c-c843b68fe835 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.710315] env[63024]: DEBUG nova.compute.manager [req-7b0bf6c4-90cc-4636-b005-cf033ae6b797 req-fe496157-5fd3-41ac-823b-44a2ed7db048 service nova] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Detach interface failed, port_id=91603922-dbf0-46ad-86ff-46990886344a, reason: Instance 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2097.809574] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2097.809876] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2097.810177] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Deleting the datastore file [datastore1] ea24d375-ba88-42ca-a07e-52000ec613c0 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2097.810494] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6f39be1-8f21-45e6-b42f-8f97c09ae252 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2097.819495] env[63024]: DEBUG oslo_vmware.api [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2097.819495] env[63024]: value = "task-1951750" [ 2097.819495] env[63024]: _type = "Task" [ 2097.819495] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2097.828019] env[63024]: DEBUG oslo_vmware.api [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951750, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2097.855360] env[63024]: DEBUG nova.network.neutron [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Successfully updated port: b0190de6-3c0b-430e-9952-40bdf36d8b58 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2097.886841] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951749, 'name': PowerOnVM_Task} progress is 89%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2098.044945] env[63024]: INFO nova.compute.manager [-] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Took 1.29 seconds to deallocate network for instance. [ 2098.106651] env[63024]: DEBUG oslo_vmware.api [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951747, 'name': PowerOnVM_Task, 'duration_secs': 0.935845} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.109540] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2098.109627] env[63024]: INFO nova.compute.manager [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Took 12.01 seconds to spawn the instance on the hypervisor. 
[ 2098.109807] env[63024]: DEBUG nova.compute.manager [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2098.110848] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8347d9b0-cc30-42b3-8bad-3453b1c614ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.255887] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4c1a01-8606-454a-aa2c-b51d34572ee5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.267696] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a593978a-62a8-4da2-836e-328bd196137b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.300411] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-364e3f41-a30b-4d54-b5af-233fe15b37ea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.309422] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90c34a00-def7-430c-b189-2819646b1b5a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.332637] env[63024]: DEBUG nova.compute.provider_tree [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2098.341652] env[63024]: DEBUG oslo_vmware.api [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951750, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147952} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.342014] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2098.342176] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2098.342373] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2098.342553] env[63024]: INFO nova.compute.manager [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Took 1.52 seconds to destroy the instance on the hypervisor. [ 2098.342793] env[63024]: DEBUG oslo.service.loopingcall [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2098.343349] env[63024]: DEBUG nova.compute.manager [-] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2098.343349] env[63024]: DEBUG nova.network.neutron [-] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2098.359185] env[63024]: DEBUG oslo_concurrency.lockutils [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2098.360449] env[63024]: DEBUG oslo_concurrency.lockutils [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2098.360449] env[63024]: DEBUG nova.network.neutron [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2098.365943] env[63024]: DEBUG nova.objects.instance [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lazy-loading 'flavor' on Instance uuid 881b1f35-206e-4c3f-bf7a-d1774a9343c2 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2098.387327] env[63024]: DEBUG oslo_vmware.api [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951749, 'name': PowerOnVM_Task, 'duration_secs': 0.655978} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2098.387610] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2098.387811] env[63024]: INFO nova.compute.manager [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Took 9.86 seconds to spawn the instance on the hypervisor. 
[ 2098.387984] env[63024]: DEBUG nova.compute.manager [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2098.388930] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cfa548c-c46e-4eb1-9d67-8349a8c5846b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2098.551974] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.629022] env[63024]: INFO nova.compute.manager [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Took 27.51 seconds to build instance. [ 2098.819772] env[63024]: DEBUG oslo_concurrency.lockutils [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.838251] env[63024]: DEBUG nova.scheduler.client.report [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2098.873734] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8c04b7ed-8cda-4b9a-b385-eb39c94235ad tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.831s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.875239] env[63024]: DEBUG oslo_concurrency.lockutils [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.056s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.875552] env[63024]: DEBUG oslo_concurrency.lockutils [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 
tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2098.876635] env[63024]: DEBUG oslo_concurrency.lockutils [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2098.877544] env[63024]: DEBUG oslo_concurrency.lockutils [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2098.878660] env[63024]: INFO nova.compute.manager [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Terminating instance [ 2098.910661] env[63024]: INFO nova.compute.manager [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Took 26.18 seconds to build instance. [ 2098.913366] env[63024]: WARNING nova.network.neutron [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] ffb24eaf-c6b6-414f-a69a-0c8806712ddd already exists in list: networks containing: ['ffb24eaf-c6b6-414f-a69a-0c8806712ddd']. 
ignoring it [ 2099.130766] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d04f16fc-2e45-4b89-885f-43c6a7560ce5 tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Lock "e4d6e79b-f110-44c2-8201-926b57eeb68d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.028s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.325357] env[63024]: DEBUG nova.compute.manager [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Received event network-vif-plugged-b0190de6-3c0b-430e-9952-40bdf36d8b58 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2099.325568] env[63024]: DEBUG oslo_concurrency.lockutils [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] Acquiring lock "8826c266-659c-46ad-bb02-aefdffab8699-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2099.325847] env[63024]: DEBUG oslo_concurrency.lockutils [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] Lock "8826c266-659c-46ad-bb02-aefdffab8699-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.329041] env[63024]: DEBUG oslo_concurrency.lockutils [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] Lock "8826c266-659c-46ad-bb02-aefdffab8699-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.329041] env[63024]: DEBUG nova.compute.manager [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] No waiting events found dispatching network-vif-plugged-b0190de6-3c0b-430e-9952-40bdf36d8b58 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2099.329041] env[63024]: WARNING nova.compute.manager [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Received unexpected event network-vif-plugged-b0190de6-3c0b-430e-9952-40bdf36d8b58 for instance with vm_state active and task_state None. [ 2099.329041] env[63024]: DEBUG nova.compute.manager [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Received event network-changed-b0190de6-3c0b-430e-9952-40bdf36d8b58 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2099.329041] env[63024]: DEBUG nova.compute.manager [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Refreshing instance network info cache due to event network-changed-b0190de6-3c0b-430e-9952-40bdf36d8b58. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2099.329041] env[63024]: DEBUG oslo_concurrency.lockutils [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] Acquiring lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2099.347619] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.292s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.348158] env[63024]: DEBUG nova.compute.manager [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2099.353180] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.801s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2099.353379] env[63024]: DEBUG nova.objects.instance [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lazy-loading 'resources' on Instance uuid 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2099.381411] env[63024]: DEBUG nova.network.neutron [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updating instance_info_cache with network_info: [{"id": "340baee8-fd68-482a-94ce-82df41470c62", "address": "fa:16:3e:0c:e1:92", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap340baee8-fd", "ovs_interfaceid": "340baee8-fd68-482a-94ce-82df41470c62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": 
"b0190de6-3c0b-430e-9952-40bdf36d8b58", "address": "fa:16:3e:6e:6f:85", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0190de6-3c", "ovs_interfaceid": "b0190de6-3c0b-430e-9952-40bdf36d8b58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2099.384590] env[63024]: DEBUG nova.compute.manager [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2099.384590] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2099.384590] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82ac1a44-d60a-432d-913d-8ab0d4e9859b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.393673] env[63024]: DEBUG oslo_vmware.api [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2099.393673] env[63024]: value = "task-1951752" [ 2099.393673] env[63024]: _type = "Task" [ 2099.393673] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.405902] env[63024]: DEBUG oslo_vmware.api [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951752, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.416531] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1aa1b30a-58e4-48eb-96b4-c3a4c4d2bce6 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.699s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2099.568147] env[63024]: DEBUG nova.network.neutron [-] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2099.858359] env[63024]: DEBUG nova.compute.utils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2099.864995] env[63024]: DEBUG nova.compute.manager [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2099.865196] env[63024]: DEBUG nova.network.neutron [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2099.885669] env[63024]: DEBUG oslo_concurrency.lockutils [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2099.886801] env[63024]: DEBUG oslo_concurrency.lockutils [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2099.886965] env[63024]: DEBUG oslo_concurrency.lockutils [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2099.887266] env[63024]: DEBUG oslo_concurrency.lockutils [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] Acquired lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2099.887453] env[63024]: DEBUG nova.network.neutron [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Refreshing network info cache for port 
b0190de6-3c0b-430e-9952-40bdf36d8b58 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2099.890766] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a801304-2080-4289-b977-ac07875644dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.917587] env[63024]: DEBUG nova.virt.hardware [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2099.917851] env[63024]: DEBUG nova.virt.hardware [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2099.918017] env[63024]: DEBUG nova.virt.hardware [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2099.918205] env[63024]: DEBUG nova.virt.hardware [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2099.918353] env[63024]: DEBUG nova.virt.hardware [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2099.918501] env[63024]: DEBUG nova.virt.hardware [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2099.918739] env[63024]: DEBUG nova.virt.hardware [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2099.918946] env[63024]: DEBUG nova.virt.hardware [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2099.919154] env[63024]: DEBUG nova.virt.hardware [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2099.919555] env[63024]: DEBUG nova.virt.hardware [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2099.919555] env[63024]: DEBUG nova.virt.hardware [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2099.928160] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Reconfiguring VM to attach interface {{(pid=63024) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 2099.929394] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31fad9a1-27d8-480d-9449-3cdd9281e0af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.941483] env[63024]: DEBUG oslo_vmware.api [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951752, 'name': PowerOffVM_Task, 'duration_secs': 0.220724} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2099.944507] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2099.944721] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2099.944911] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402246', 'volume_id': '6a38fd49-11fa-49bd-8905-34d2940181fb', 'name': 'volume-6a38fd49-11fa-49bd-8905-34d2940181fb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '881b1f35-206e-4c3f-bf7a-d1774a9343c2', 'attached_at': '', 'detached_at': '', 'volume_id': '6a38fd49-11fa-49bd-8905-34d2940181fb', 'serial': '6a38fd49-11fa-49bd-8905-34d2940181fb'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2099.946393] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55d672d3-7ac6-433d-af26-a0bb24758208 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.952991] env[63024]: DEBUG oslo_vmware.api [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2099.952991] env[63024]: value = "task-1951753" [ 2099.952991] env[63024]: _type = "Task" [ 2099.952991] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2099.976073] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-564b375a-4b66-48a5-9988-f7687ed037a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2099.984936] env[63024]: DEBUG oslo_vmware.api [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951753, 'name': ReconfigVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2099.988056] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0d9185-00f6-4935-9c1c-515023594d5f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.013530] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92d0e40b-c82f-4dd6-a34c-05725326a39b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.030845] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] The volume has not been displaced from its original location: [datastore1] volume-6a38fd49-11fa-49bd-8905-34d2940181fb/volume-6a38fd49-11fa-49bd-8905-34d2940181fb.vmdk. No consolidation needed. 
{{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2100.036585] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Reconfiguring VM instance instance-00000069 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2100.039538] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-163ef800-42b0-48f9-b379-6f5c7f55e166 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.052372] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Acquiring lock "e4d6e79b-f110-44c2-8201-926b57eeb68d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.052625] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Lock "e4d6e79b-f110-44c2-8201-926b57eeb68d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2100.052832] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Acquiring lock "e4d6e79b-f110-44c2-8201-926b57eeb68d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.053057] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Lock "e4d6e79b-f110-44c2-8201-926b57eeb68d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2100.053192] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Lock "e4d6e79b-f110-44c2-8201-926b57eeb68d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2100.055477] env[63024]: INFO nova.compute.manager [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Terminating instance [ 2100.063124] env[63024]: DEBUG oslo_vmware.api [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: 
(returnval){ [ 2100.063124] env[63024]: value = "task-1951754" [ 2100.063124] env[63024]: _type = "Task" [ 2100.063124] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.074130] env[63024]: INFO nova.compute.manager [-] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Took 1.73 seconds to deallocate network for instance. [ 2100.074470] env[63024]: DEBUG oslo_vmware.api [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951754, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.172559] env[63024]: DEBUG nova.policy [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28fab1e92c1d4491986100983f6b4ab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6072e8931d9540ad8fe4a2b4b1ec782d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2100.191249] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d683dda-7077-4293-8b8e-d1cf270fd7e0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.202747] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1be170fc-783e-45ad-918c-87a01dab7814 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.240042] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56f05297-8879-4f3d-ad8e-fe63fa6d44fe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.249165] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9cfcdb8-76e2-46c6-a44a-1278e2e49b0d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.264989] env[63024]: DEBUG nova.compute.provider_tree [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2100.366033] env[63024]: DEBUG nova.compute.manager [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2100.485435] env[63024]: DEBUG oslo_vmware.api [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951753, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.560107] env[63024]: DEBUG nova.compute.manager [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2100.560597] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2100.565051] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41640c00-440d-42e0-8528-8b56f9a17b41 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.579500] env[63024]: DEBUG oslo_vmware.api [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951754, 'name': ReconfigVM_Task, 'duration_secs': 0.209845} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2100.584802] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Reconfigured VM instance instance-00000069 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2100.592645] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2100.593149] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2100.594757] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e5aefd0c-f1d8-4072-8992-9ee641bfc413 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2100.607449] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9230bdb9-a714-48f8-a009-01f58eae2922 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
2100.621591] env[63024]: DEBUG oslo_vmware.api [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2100.621591] env[63024]: value = "task-1951756" [ 2100.621591] env[63024]: _type = "Task" [ 2100.621591] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.623434] env[63024]: DEBUG oslo_vmware.api [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Waiting for the task: (returnval){ [ 2100.623434] env[63024]: value = "task-1951755" [ 2100.623434] env[63024]: _type = "Task" [ 2100.623434] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2100.644337] env[63024]: DEBUG oslo_vmware.api [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951755, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.649362] env[63024]: DEBUG oslo_vmware.api [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951756, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2100.707071] env[63024]: DEBUG nova.network.neutron [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Successfully created port: a13f8ab3-d900-447f-8772-5be6b3d48296 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2100.771023] env[63024]: DEBUG nova.scheduler.client.report [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2100.831181] env[63024]: DEBUG nova.network.neutron [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updated VIF entry in instance network info cache for port b0190de6-3c0b-430e-9952-40bdf36d8b58. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2100.831181] env[63024]: DEBUG nova.network.neutron [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updating instance_info_cache with network_info: [{"id": "340baee8-fd68-482a-94ce-82df41470c62", "address": "fa:16:3e:0c:e1:92", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap340baee8-fd", "ovs_interfaceid": "340baee8-fd68-482a-94ce-82df41470c62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b0190de6-3c0b-430e-9952-40bdf36d8b58", "address": "fa:16:3e:6e:6f:85", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb0190de6-3c", "ovs_interfaceid": "b0190de6-3c0b-430e-9952-40bdf36d8b58", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2100.982424] env[63024]: DEBUG oslo_vmware.api [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951753, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.136218] env[63024]: DEBUG oslo_vmware.api [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951756, 'name': ReconfigVM_Task, 'duration_secs': 0.165331} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.136218] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402246', 'volume_id': '6a38fd49-11fa-49bd-8905-34d2940181fb', 'name': 'volume-6a38fd49-11fa-49bd-8905-34d2940181fb', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '881b1f35-206e-4c3f-bf7a-d1774a9343c2', 'attached_at': '', 'detached_at': '', 'volume_id': '6a38fd49-11fa-49bd-8905-34d2940181fb', 'serial': '6a38fd49-11fa-49bd-8905-34d2940181fb'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2101.136477] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2101.137164] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47417e9d-660a-4981-b354-644b14bd71c9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.142725] env[63024]: DEBUG oslo_vmware.api [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951755, 'name': PowerOffVM_Task, 'duration_secs': 0.217227} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2101.143088] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2101.143267] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2101.143567] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1324dd4f-db11-41b8-9a21-8fb73dd33f44 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.147101] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2101.147323] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1914b804-6404-4349-a993-e22293d900b3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.279794] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.927s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.282254] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.690s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2101.282529] env[63024]: DEBUG nova.objects.instance [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lazy-loading 'resources' on Instance uuid ea24d375-ba88-42ca-a07e-52000ec613c0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2101.302075] env[63024]: INFO nova.scheduler.client.report [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted allocations for instance 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a [ 2101.333724] env[63024]: DEBUG oslo_concurrency.lockutils [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] Releasing lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2101.335023] env[63024]: DEBUG nova.compute.manager 
[req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Received event network-vif-deleted-95e5d41a-5998-4d48-9aec-6255c74c448f {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2101.335316] env[63024]: INFO nova.compute.manager [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Neutron deleted interface 95e5d41a-5998-4d48-9aec-6255c74c448f; detaching it from the instance and deleting it from the info cache [ 2101.335587] env[63024]: DEBUG nova.network.neutron [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2101.356615] env[63024]: DEBUG nova.compute.manager [req-c0d5cdea-e47c-41da-9b33-f6fdfaf2991e req-bf51f1c7-334c-47be-90cc-2722bbca61cf service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Received event network-changed-f5649c3a-8bab-4abb-a1a2-1d88b780eba2 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2101.356615] env[63024]: DEBUG nova.compute.manager [req-c0d5cdea-e47c-41da-9b33-f6fdfaf2991e req-bf51f1c7-334c-47be-90cc-2722bbca61cf service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Refreshing instance network info cache due to event network-changed-f5649c3a-8bab-4abb-a1a2-1d88b780eba2. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2101.356840] env[63024]: DEBUG oslo_concurrency.lockutils [req-c0d5cdea-e47c-41da-9b33-f6fdfaf2991e req-bf51f1c7-334c-47be-90cc-2722bbca61cf service nova] Acquiring lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2101.356903] env[63024]: DEBUG oslo_concurrency.lockutils [req-c0d5cdea-e47c-41da-9b33-f6fdfaf2991e req-bf51f1c7-334c-47be-90cc-2722bbca61cf service nova] Acquired lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2101.357044] env[63024]: DEBUG nova.network.neutron [req-c0d5cdea-e47c-41da-9b33-f6fdfaf2991e req-bf51f1c7-334c-47be-90cc-2722bbca61cf service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Refreshing network info cache for port f5649c3a-8bab-4abb-a1a2-1d88b780eba2 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2101.381441] env[63024]: DEBUG nova.compute.manager [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2101.408527] env[63024]: DEBUG nova.virt.hardware [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2101.408785] env[63024]: DEBUG nova.virt.hardware [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2101.408941] env[63024]: DEBUG nova.virt.hardware [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2101.409165] env[63024]: DEBUG nova.virt.hardware [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2101.409279] env[63024]: DEBUG nova.virt.hardware [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2101.409427] env[63024]: DEBUG nova.virt.hardware [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2101.409639] env[63024]: DEBUG nova.virt.hardware [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2101.409797] env[63024]: DEBUG nova.virt.hardware [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2101.409965] env[63024]: DEBUG nova.virt.hardware [None 
req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2101.410144] env[63024]: DEBUG nova.virt.hardware [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2101.410318] env[63024]: DEBUG nova.virt.hardware [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2101.411193] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a645ac7c-ea54-4b31-8948-c26bcfec03c9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.419714] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d6fd3f-b6f3-4884-a7f9-75fe02da03a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.480226] env[63024]: DEBUG oslo_vmware.api [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951753, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.637453] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2101.637673] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2101.637859] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Deleting the datastore file [datastore1] e4d6e79b-f110-44c2-8201-926b57eeb68d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2101.639201] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19d257ea-db44-4479-b09a-c56c7985eef5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.641680] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Unregistered the VM {{(pid=63024) _destroy_instance 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2101.641874] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2101.642054] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleting the datastore file [datastore1] 881b1f35-206e-4c3f-bf7a-d1774a9343c2 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2101.642322] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-db573f58-00f0-490a-85ef-982af061e48d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.649532] env[63024]: DEBUG oslo_vmware.api [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Waiting for the task: (returnval){ [ 2101.649532] env[63024]: value = "task-1951759" [ 2101.649532] env[63024]: _type = "Task" [ 2101.649532] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.650949] env[63024]: DEBUG oslo_vmware.api [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2101.650949] env[63024]: value = "task-1951760" [ 2101.650949] env[63024]: _type = "Task" [ 2101.650949] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2101.662276] env[63024]: DEBUG oslo_vmware.api [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951759, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.665236] env[63024]: DEBUG oslo_vmware.api [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951760, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2101.810426] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c1452a46-1aea-4fd7-873e-1f5527309309 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "1666cff0-59bd-41a0-aa3c-d1e8fac3a49a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.830s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2101.838427] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e8b5f119-4564-478b-86ff-b4d2fc1ac987 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.850720] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a35b3f-87c3-4b44-abd4-453ae9625529 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.886432] env[63024]: DEBUG nova.compute.manager [req-49c23c8e-bc3c-4f5f-9300-e09c830733d6 req-dfab8856-aa43-49f0-9a0f-7be4ac1ee70f service nova] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Detach interface failed, port_id=95e5d41a-5998-4d48-9aec-6255c74c448f, reason: Instance ea24d375-ba88-42ca-a07e-52000ec613c0 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2101.947519] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb9e550-2f0e-40d0-9ebf-e8e57e38c409 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.957434] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466502dc-903a-4911-b4f5-497d669783ef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2101.991542] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f4882d5-5941-4f91-9492-fd1266604592 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.001412] env[63024]: DEBUG oslo_vmware.api [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951753, 'name': ReconfigVM_Task, 'duration_secs': 1.759103} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.001984] env[63024]: DEBUG oslo_concurrency.lockutils [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2102.002230] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Reconfigured VM to attach interface {{(pid=63024) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 2102.005670] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a312d9-f10c-405d-923c-446bda44311f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2102.023377] env[63024]: DEBUG nova.compute.provider_tree [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2102.868020] env[63024]: DEBUG nova.network.neutron [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Successfully updated port: a13f8ab3-d900-447f-8772-5be6b3d48296 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2102.872364] env[63024]: DEBUG oslo_concurrency.lockutils [None req-30c7d1a6-4486-491a-a38b-85d0ddd10a47 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-8826c266-659c-46ad-bb02-aefdffab8699-b0190de6-3c0b-430e-9952-40bdf36d8b58" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.946s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2102.873697] env[63024]: DEBUG nova.scheduler.client.report [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2102.882985] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "a483e6b5-a192-4cfe-be36-1ce0667f5697" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2102.883403] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "a483e6b5-a192-4cfe-be36-1ce0667f5697" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2102.889558] env[63024]: DEBUG oslo_vmware.api [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Task: {'id': task-1951759, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.392887} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.890024] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2102.890211] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2102.890501] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2102.890570] env[63024]: INFO nova.compute.manager [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Took 2.33 seconds to destroy the instance on the hypervisor. [ 2102.890843] env[63024]: DEBUG oslo.service.loopingcall [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2102.894367] env[63024]: DEBUG nova.compute.manager [-] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2102.894489] env[63024]: DEBUG nova.network.neutron [-] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2102.896523] env[63024]: DEBUG oslo_vmware.api [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.379092} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2102.896763] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2102.896950] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2102.897139] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2102.897311] env[63024]: INFO nova.compute.manager [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Took 3.51 seconds to destroy the instance on the hypervisor. [ 2102.897527] env[63024]: DEBUG oslo.service.loopingcall [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2102.897975] env[63024]: DEBUG nova.compute.manager [-] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2102.898083] env[63024]: DEBUG nova.network.neutron [-] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2102.991355] env[63024]: DEBUG nova.network.neutron [req-c0d5cdea-e47c-41da-9b33-f6fdfaf2991e req-bf51f1c7-334c-47be-90cc-2722bbca61cf service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updated VIF entry in instance network info cache for port f5649c3a-8bab-4abb-a1a2-1d88b780eba2. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2102.991727] env[63024]: DEBUG nova.network.neutron [req-c0d5cdea-e47c-41da-9b33-f6fdfaf2991e req-bf51f1c7-334c-47be-90cc-2722bbca61cf service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance_info_cache with network_info: [{"id": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "address": "fa:16:3e:a9:0a:72", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5649c3a-8b", "ovs_interfaceid": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2103.228781] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "3f350c3e-e9b3-4798-a424-fd32235d21cf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2103.229041] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "3f350c3e-e9b3-4798-a424-fd32235d21cf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.380549] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.098s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.384113] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2103.384344] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 
tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2103.384579] env[63024]: DEBUG nova.network.neutron [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2103.391269] env[63024]: DEBUG nova.compute.manager [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2103.398443] env[63024]: DEBUG nova.compute.manager [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Received event network-vif-plugged-a13f8ab3-d900-447f-8772-5be6b3d48296 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2103.399131] env[63024]: DEBUG oslo_concurrency.lockutils [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] Acquiring lock "fa326fe2-c00e-4379-954a-9b3275328abc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2103.399131] env[63024]: DEBUG oslo_concurrency.lockutils [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] Lock "fa326fe2-c00e-4379-954a-9b3275328abc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.399383] env[63024]: DEBUG oslo_concurrency.lockutils [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] Lock "fa326fe2-c00e-4379-954a-9b3275328abc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.400235] env[63024]: DEBUG nova.compute.manager [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] No waiting events found dispatching network-vif-plugged-a13f8ab3-d900-447f-8772-5be6b3d48296 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2103.400235] env[63024]: WARNING nova.compute.manager [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Received unexpected event network-vif-plugged-a13f8ab3-d900-447f-8772-5be6b3d48296 for instance with vm_state building and task_state spawning. 
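
[editorial note] The event handling just above (lock on "fa326fe2-...-events", "No waiting events found dispatching network-vif-plugged-...", then the "Received unexpected event" warning) is the external side of Nova's vif-plugged handshake: the compute manager only treats a Neutron event as expected if the instance-build path registered a waiter for it first. A minimal sketch of that register-then-dispatch pattern (simplified, hypothetical names; not the actual nova.compute.manager code):

    import threading
    from collections import defaultdict

    class InstanceEvents:
        """Toy per-instance event table, analogous to the "<uuid>-events" lock above."""
        def __init__(self):
            self._lock = threading.Lock()
            self._waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}

        def prepare(self, instance_uuid, event_name):
            # The build path registers interest *before* the VIF is plugged.
            ev = threading.Event()
            with self._lock:
                self._waiters[instance_uuid][event_name] = ev
            return ev

        def pop(self, instance_uuid, event_name):
            # External event handler: returns the waiter if one exists, else None,
            # which corresponds to the "Received unexpected event" warning.
            with self._lock:
                return self._waiters[instance_uuid].pop(event_name, None)

    events = InstanceEvents()
    waiter = events.pop("fa326fe2", "network-vif-plugged-a13f8ab3")
    if waiter is None:
        print("unexpected event: nothing is waiting for it yet")
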
[ 2103.400235] env[63024]: DEBUG nova.compute.manager [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Received event network-changed-a13f8ab3-d900-447f-8772-5be6b3d48296 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2103.400445] env[63024]: DEBUG nova.compute.manager [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Refreshing instance network info cache due to event network-changed-a13f8ab3-d900-447f-8772-5be6b3d48296. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2103.400708] env[63024]: DEBUG oslo_concurrency.lockutils [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] Acquiring lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2103.411690] env[63024]: INFO nova.scheduler.client.report [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Deleted allocations for instance ea24d375-ba88-42ca-a07e-52000ec613c0 [ 2103.494553] env[63024]: DEBUG oslo_concurrency.lockutils [req-c0d5cdea-e47c-41da-9b33-f6fdfaf2991e req-bf51f1c7-334c-47be-90cc-2722bbca61cf service nova] Releasing lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2103.731373] env[63024]: DEBUG nova.compute.manager [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2103.842516] env[63024]: DEBUG nova.network.neutron [-] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2103.895249] env[63024]: DEBUG nova.network.neutron [-] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2103.912856] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2103.913153] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2103.915149] env[63024]: INFO nova.compute.claims [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2103.920710] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95c68b9d-bba8-4719-a943-7071e543b264 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "ea24d375-ba88-42ca-a07e-52000ec613c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.606s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2103.922367] env[63024]: DEBUG nova.network.neutron [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2104.064243] env[63024]: DEBUG nova.network.neutron [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance_info_cache with network_info: [{"id": "a13f8ab3-d900-447f-8772-5be6b3d48296", "address": "fa:16:3e:47:7f:73", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa13f8ab3-d9", "ovs_interfaceid": "a13f8ab3-d900-447f-8772-5be6b3d48296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2104.135590] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "interface-8826c266-659c-46ad-bb02-aefdffab8699-b0190de6-3c0b-430e-9952-40bdf36d8b58" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.136075] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-8826c266-659c-46ad-bb02-aefdffab8699-b0190de6-3c0b-430e-9952-40bdf36d8b58" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2104.251229] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.346135] env[63024]: INFO nova.compute.manager [-] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Took 1.45 seconds to deallocate network for instance. [ 2104.399742] env[63024]: INFO nova.compute.manager [-] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Took 1.51 seconds to deallocate network for instance. 
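
[editorial note] The "compute_resources" lock entries in this stretch ("acquired by ...instance_claim :: waited 0.000s", "released ... :: held 2.098s") come from oslo.concurrency's lockutils wrapper, which logs wait and hold timings around the decorated call so concurrent claims and usage updates on one host are serialized. A short illustration of that pattern (assumes oslo.concurrency is installed; the function below is illustrative, not the resource tracker's real method):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def instance_claim(instance_uuid, vcpus, memory_mb):
        # Runs with the "compute_resources" lock held; the decorator emits the
        # same "acquired by ... waited" / "released ... held" DEBUG lines seen above.
        print(f"claiming {vcpus} vCPU / {memory_mb} MB for {instance_uuid}")

    instance_claim("a483e6b5-a192-4cfe-be36-1ce0667f5697", 1, 512)
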
[ 2104.566375] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2104.566486] env[63024]: DEBUG nova.compute.manager [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Instance network_info: |[{"id": "a13f8ab3-d900-447f-8772-5be6b3d48296", "address": "fa:16:3e:47:7f:73", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa13f8ab3-d9", "ovs_interfaceid": "a13f8ab3-d900-447f-8772-5be6b3d48296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2104.567125] env[63024]: DEBUG oslo_concurrency.lockutils [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] Acquired lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.567125] env[63024]: DEBUG nova.network.neutron [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Refreshing network info cache for port a13f8ab3-d900-447f-8772-5be6b3d48296 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2104.568216] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:7f:73', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3ccbdbb-8b49-4a26-913f-2a448b72280f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a13f8ab3-d900-447f-8772-5be6b3d48296', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2104.576308] env[63024]: DEBUG oslo.service.loopingcall [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2104.579538] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2104.580060] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5e512f5-1859-4004-adc8-c57d30920446 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.602764] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2104.602764] env[63024]: value = "task-1951761" [ 2104.602764] env[63024]: _type = "Task" [ 2104.602764] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.611623] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951761, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.638667] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2104.638859] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2104.639763] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e88e5fca-f60e-4b0f-ba61-67b5a86882ca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.658356] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa927a47-37d6-4f5e-a364-a6eaca88f963 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.690346] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Reconfiguring VM to detach interface {{(pid=63024) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 2104.692973] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3dfc88e6-c947-41b8-acdd-854bcbad3d09 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2104.715056] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2104.715056] env[63024]: value = "task-1951762" [ 2104.715056] env[63024]: _type = "Task" [ 2104.715056] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2104.723934] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2104.833950] env[63024]: DEBUG nova.network.neutron [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updated VIF entry in instance network info cache for port a13f8ab3-d900-447f-8772-5be6b3d48296. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2104.833950] env[63024]: DEBUG nova.network.neutron [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance_info_cache with network_info: [{"id": "a13f8ab3-d900-447f-8772-5be6b3d48296", "address": "fa:16:3e:47:7f:73", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa13f8ab3-d9", "ovs_interfaceid": "a13f8ab3-d900-447f-8772-5be6b3d48296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2104.906858] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2104.908070] env[63024]: INFO nova.compute.manager [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Took 0.56 seconds to detach 1 volumes for instance. 
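
[editorial note] Every vCenter operation in this section (CreateVM_Task, ReconfigVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ...) is asynchronous: the driver receives a task handle and wait_for_task polls it, producing the "progress is N%" lines until the task reaches a terminal state. A stripped-down version of that poll loop (plain Python sketch, not the actual oslo.vmware implementation):

    import time

    TERMINAL = {"success", "error"}

    def wait_for_task(get_task_info, task_id, poll_interval=0.5):
        """Poll a task until it finishes, mirroring the 'progress is N%' lines."""
        while True:
            info = get_task_info(task_id)      # a SOAP call in the real driver
            if info["state"] in TERMINAL:
                if info["state"] == "error":
                    raise RuntimeError(f"{task_id} failed: {info.get('error')}")
                return info
            print(f"Task {task_id} ({info['name']}) progress is {info['progress']}%")
            time.sleep(poll_interval)

    # Toy task source that completes after a few polls.
    _calls = {"n": 0}
    def fake_task_info(task_id):
        _calls["n"] += 1
        done = _calls["n"] >= 3
        return {"name": "ReconfigVM_Task",
                "state": "success" if done else "running",
                "progress": 100 if done else 14 * _calls["n"]}

    wait_for_task(fake_task_info, "task-1951762")
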
[ 2105.091786] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91a2b136-7231-4887-afd1-93ee16b337d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.099667] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df5de101-1175-405e-8ad8-4aafbbba39d1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.131413] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36741d7c-8eec-4c68-8284-ae8e351605e5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.136592] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951761, 'name': CreateVM_Task, 'duration_secs': 0.38916} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.137071] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2105.137775] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.137914] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2105.138268] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2105.138512] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e8d0a8a-3c50-4f4e-becd-d3e309a20f98 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.143041] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7784fea-7475-41d2-8f11-557992d299d5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.147770] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2105.147770] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f2b2b4-2343-e14e-38ec-14bd6b136e66" [ 2105.147770] env[63024]: _type = "Task" [ 2105.147770] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.158344] env[63024]: DEBUG nova.compute.provider_tree [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2105.164679] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f2b2b4-2343-e14e-38ec-14bd6b136e66, 'name': SearchDatastore_Task, 'duration_secs': 0.00986} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.164882] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2105.165117] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2105.165345] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2105.165489] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2105.165665] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2105.165901] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6967b462-def3-45ca-94f2-f11174e1f7b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.174219] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2105.174402] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2105.175068] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08f43bb4-fa06-4b22-8004-3cd958214a06 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.182120] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2105.182120] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d4457c-90e3-8bad-ae5d-f8bde031569b" [ 2105.182120] env[63024]: _type = "Task" [ 2105.182120] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.189579] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d4457c-90e3-8bad-ae5d-f8bde031569b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.224159] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.337040] env[63024]: DEBUG oslo_concurrency.lockutils [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] Releasing lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2105.337354] env[63024]: DEBUG nova.compute.manager [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Received event network-vif-deleted-a75a35bb-1971-4617-9a1a-5750c7485384 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2105.337597] env[63024]: INFO nova.compute.manager [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Neutron deleted interface a75a35bb-1971-4617-9a1a-5750c7485384; detaching it from the instance and deleting it from the info cache [ 2105.337832] env[63024]: DEBUG nova.network.neutron [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2105.414200] env[63024]: DEBUG oslo_concurrency.lockutils [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2105.455966] env[63024]: DEBUG nova.compute.manager [req-d4e1ca16-c81a-44bf-b6e5-2fe761d0450c req-48f92fe6-a8a8-447a-ba72-69b83770a21b service nova] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Received event network-vif-deleted-1ca53ff5-e854-46d6-ad35-04dc9c98d396 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2105.662502] env[63024]: DEBUG nova.scheduler.client.report [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2105.695269] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d4457c-90e3-8bad-ae5d-f8bde031569b, 'name': SearchDatastore_Task, 'duration_secs': 0.008658} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2105.695980] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb89fefb-184d-4c6f-a413-29f279527a11 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.702440] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2105.702440] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523c5d99-6542-5061-fefc-f36cd739d924" [ 2105.702440] env[63024]: _type = "Task" [ 2105.702440] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2105.710762] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523c5d99-6542-5061-fefc-f36cd739d924, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.724069] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2105.840206] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e90bcb09-8186-44de-80bd-59b2d593d83f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.852547] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95fbfa17-a303-425b-8c36-61346005d347 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2105.882840] env[63024]: DEBUG nova.compute.manager [req-b680d55e-fa1c-4506-b7e5-2f387ae8387e req-7179e185-8320-43e4-812c-f0c1b34665bb service nova] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Detach interface failed, port_id=a75a35bb-1971-4617-9a1a-5750c7485384, reason: Instance 881b1f35-206e-4c3f-bf7a-d1774a9343c2 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2106.004658] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "94893f45-fb96-463b-82a9-e2fd884b81f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2106.004943] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.170221] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.257s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2106.170408] env[63024]: DEBUG nova.compute.manager [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2106.173285] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.922s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2106.174702] env[63024]: INFO nova.compute.claims [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2106.214996] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523c5d99-6542-5061-fefc-f36cd739d924, 'name': SearchDatastore_Task, 'duration_secs': 0.010639} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.215299] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2106.215575] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] fa326fe2-c00e-4379-954a-9b3275328abc/fa326fe2-c00e-4379-954a-9b3275328abc.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2106.216126] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4aead6f8-663c-436c-85b1-5627799e66c7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.226979] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.228095] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2106.228095] env[63024]: value = "task-1951763" [ 2106.228095] env[63024]: _type = "Task" [ 2106.228095] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.236265] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951763, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.507721] env[63024]: DEBUG nova.compute.manager [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2106.678882] env[63024]: DEBUG nova.compute.utils [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2106.682683] env[63024]: DEBUG nova.compute.manager [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2106.682858] env[63024]: DEBUG nova.network.neutron [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2106.725725] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.730328] env[63024]: DEBUG nova.policy [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a2cc094a0a6b444ab1880fcfb1de4e8b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6bbfeec6d47746328f185acd132e0d5a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2106.739960] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951763, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.44417} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2106.740265] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] fa326fe2-c00e-4379-954a-9b3275328abc/fa326fe2-c00e-4379-954a-9b3275328abc.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2106.740493] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2106.740757] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d2087565-5ff1-42b9-a467-b63f8e011802 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2106.747732] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2106.747732] env[63024]: value = "task-1951764" [ 2106.747732] env[63024]: _type = "Task" [ 2106.747732] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2106.755837] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951764, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2106.984785] env[63024]: DEBUG nova.network.neutron [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Successfully created port: d9aaf9d8-7a03-4fe5-8494-40497e138b13 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2107.031715] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2107.182548] env[63024]: DEBUG nova.compute.manager [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2107.236017] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.262251] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951764, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.063315} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.262251] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2107.264557] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482f77f9-3676-4fbe-abe1-83fde00ad303 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.291463] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] fa326fe2-c00e-4379-954a-9b3275328abc/fa326fe2-c00e-4379-954a-9b3275328abc.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2107.294227] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b1e08d79-da3b-41ea-aed4-fe8335fc09c7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.316363] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2107.316363] env[63024]: value = "task-1951765" [ 2107.316363] env[63024]: _type = "Task" [ 2107.316363] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.328837] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951765, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.404808] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d6ca69-4727-4fbc-ba97-1f1658088aca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.412816] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd71f81-dbc7-4d8c-8420-abaeedbdda83 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.443582] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-010a2c66-f2b1-491f-adc0-6b6c70ecffe6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.452019] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16ec6964-2dd8-4fba-b54d-c14fc66d344a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.464587] env[63024]: DEBUG nova.compute.provider_tree [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2107.690999] env[63024]: INFO nova.virt.block_device [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Booting with volume f9f5f8da-d54f-41bf-9fd3-c3e75748a910 at /dev/sda [ 2107.726730] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.727820] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5fa921e3-2c26-4fad-b7c4-497debfbb2c3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.737182] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb65061-91b7-4aeb-b484-565f0dfe9aae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.768664] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7139773f-9ca8-46df-9e9a-147c11223b61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.778637] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca464088-5e84-4d06-9329-59181b9e3b1f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.808704] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d15f2d6-7beb-428e-946e-38e55d495fd8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.815358] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b03511a7-6390-4e69-b620-8dfaf595cf6d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.826019] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951765, 'name': ReconfigVM_Task, 'duration_secs': 0.374064} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2107.826324] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Reconfigured VM instance instance-0000006e to attach disk [datastore1] fa326fe2-c00e-4379-954a-9b3275328abc/fa326fe2-c00e-4379-954a-9b3275328abc.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2107.827018] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7afc710c-2651-47e5-9ef7-b04dd4d8f731 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2107.832518] env[63024]: DEBUG nova.virt.block_device [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating existing volume attachment record: f3d6596b-d713-4b9b-ad61-508b4e212d20 {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2107.835618] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2107.835618] env[63024]: value = "task-1951766" [ 2107.835618] env[63024]: _type = "Task" [ 2107.835618] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2107.844179] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951766, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2107.967587] env[63024]: DEBUG nova.scheduler.client.report [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2108.228051] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.346443] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951766, 'name': Rename_Task, 'duration_secs': 0.14521} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2108.346731] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2108.346990] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8f26bb96-dd55-42c5-969a-a6da291c0496 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2108.355177] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2108.355177] env[63024]: value = "task-1951767" [ 2108.355177] env[63024]: _type = "Task" [ 2108.355177] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2108.363537] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951767, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.472387] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.299s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2108.472929] env[63024]: DEBUG nova.compute.manager [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2108.475601] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.569s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2108.475828] env[63024]: DEBUG nova.objects.instance [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Lazy-loading 'resources' on Instance uuid e4d6e79b-f110-44c2-8201-926b57eeb68d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2108.554746] env[63024]: DEBUG nova.compute.manager [req-9ca0487f-4a4c-4be2-960b-ca299409d146 req-dfc91de8-8519-4a55-b1a3-9c0bdc8bc0d4 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Received event network-vif-plugged-d9aaf9d8-7a03-4fe5-8494-40497e138b13 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2108.555030] env[63024]: DEBUG oslo_concurrency.lockutils [req-9ca0487f-4a4c-4be2-960b-ca299409d146 req-dfc91de8-8519-4a55-b1a3-9c0bdc8bc0d4 service nova] Acquiring lock "a483e6b5-a192-4cfe-be36-1ce0667f5697-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2108.555281] env[63024]: DEBUG oslo_concurrency.lockutils [req-9ca0487f-4a4c-4be2-960b-ca299409d146 req-dfc91de8-8519-4a55-b1a3-9c0bdc8bc0d4 service nova] Lock "a483e6b5-a192-4cfe-be36-1ce0667f5697-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2108.555612] env[63024]: DEBUG oslo_concurrency.lockutils [req-9ca0487f-4a4c-4be2-960b-ca299409d146 req-dfc91de8-8519-4a55-b1a3-9c0bdc8bc0d4 service nova] Lock "a483e6b5-a192-4cfe-be36-1ce0667f5697-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2108.555612] env[63024]: DEBUG nova.compute.manager [req-9ca0487f-4a4c-4be2-960b-ca299409d146 req-dfc91de8-8519-4a55-b1a3-9c0bdc8bc0d4 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] No waiting events found dispatching network-vif-plugged-d9aaf9d8-7a03-4fe5-8494-40497e138b13 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2108.555784] env[63024]: WARNING nova.compute.manager [req-9ca0487f-4a4c-4be2-960b-ca299409d146 req-dfc91de8-8519-4a55-b1a3-9c0bdc8bc0d4 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Received unexpected event network-vif-plugged-d9aaf9d8-7a03-4fe5-8494-40497e138b13 for instance with vm_state building and task_state block_device_mapping. 
[ 2108.689397] env[63024]: DEBUG nova.network.neutron [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Successfully updated port: d9aaf9d8-7a03-4fe5-8494-40497e138b13 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2108.728318] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.866499] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951767, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2108.979604] env[63024]: DEBUG nova.compute.utils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2108.981504] env[63024]: DEBUG nova.compute.manager [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2108.981504] env[63024]: DEBUG nova.network.neutron [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2109.025954] env[63024]: DEBUG nova.policy [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '893bfe0d8eef423aae6c7eb5cdc1a9e9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '18540818b60e4483963d14559bc5c38d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2109.136838] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08789b11-2be7-42d1-b8d0-f317213252cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.144743] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8d0d105-a82f-46ec-8c93-ad5c083a1c78 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.174276] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-be91d58f-abb4-4c4e-b72e-36ab01d6f636 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.181896] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51f1364e-2add-4833-b37f-9290f5c78f31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.196746] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2109.196897] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2109.197061] env[63024]: DEBUG nova.network.neutron [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2109.202018] env[63024]: DEBUG nova.compute.provider_tree [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2109.227950] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.284243] env[63024]: DEBUG nova.network.neutron [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Successfully created port: bd02ff9f-c94b-4697-90ee-516a789ffac6 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2109.371367] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951767, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.416597] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2109.416854] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2109.491117] env[63024]: DEBUG nova.compute.manager [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2109.704296] env[63024]: DEBUG nova.scheduler.client.report [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2109.730301] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2109.733835] env[63024]: DEBUG nova.network.neutron [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2109.873967] env[63024]: DEBUG oslo_vmware.api [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951767, 'name': PowerOnVM_Task, 'duration_secs': 1.12599} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2109.876687] env[63024]: DEBUG nova.network.neutron [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance_info_cache with network_info: [{"id": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "address": "fa:16:3e:37:15:c7", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9aaf9d8-7a", "ovs_interfaceid": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2109.877988] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2109.878221] env[63024]: INFO nova.compute.manager [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Took 8.50 seconds to spawn the instance on the hypervisor. [ 2109.878402] env[63024]: DEBUG nova.compute.manager [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2109.879752] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22cf65de-6991-41dd-94f7-f0e4409e9c42 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.915547] env[63024]: DEBUG nova.compute.manager [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2109.915646] env[63024]: DEBUG nova.virt.hardware [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2109.915808] env[63024]: DEBUG nova.virt.hardware [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2109.915959] env[63024]: DEBUG nova.virt.hardware [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2109.916155] env[63024]: DEBUG nova.virt.hardware [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2109.916297] env[63024]: DEBUG nova.virt.hardware [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2109.916438] env[63024]: DEBUG nova.virt.hardware [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2109.916651] env[63024]: DEBUG nova.virt.hardware [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2109.916912] env[63024]: DEBUG nova.virt.hardware [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2109.917108] env[63024]: DEBUG nova.virt.hardware [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Got 1 possible topologies {{(pid=63024) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2109.917277] env[63024]: DEBUG nova.virt.hardware [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2109.917447] env[63024]: DEBUG nova.virt.hardware [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2109.918324] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f97dd9e7-c5de-4d35-b119-e8c82a7c4105 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2109.931541] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2109.931541] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 2109.934129] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e58daedd-aede-4911-b9c7-b00e921f19a3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.208855] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.733s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2110.211326] env[63024]: DEBUG oslo_concurrency.lockutils [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.797s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2110.211649] env[63024]: DEBUG nova.objects.instance [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lazy-loading 'resources' on Instance uuid 881b1f35-206e-4c3f-bf7a-d1774a9343c2 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2110.231715] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.232715] env[63024]: INFO nova.scheduler.client.report [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Deleted allocations for instance e4d6e79b-f110-44c2-8201-926b57eeb68d [ 2110.379875] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2110.380271] env[63024]: DEBUG nova.compute.manager [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Instance network_info: |[{"id": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "address": "fa:16:3e:37:15:c7", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9aaf9d8-7a", "ovs_interfaceid": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2110.380602] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:37:15:c7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'afae6acd-1873-4228-9d5a-1cd5d4efe3e4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd9aaf9d8-7a03-4fe5-8494-40497e138b13', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2110.388867] env[63024]: DEBUG oslo.service.loopingcall [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2110.389128] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2110.389357] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c5ad721-53d0-413f-b6a5-de2e0331d651 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.410025] env[63024]: INFO nova.compute.manager [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Took 13.66 seconds to build instance. [ 2110.414167] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2110.414167] env[63024]: value = "task-1951768" [ 2110.414167] env[63024]: _type = "Task" [ 2110.414167] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2110.422689] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951768, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2110.465543] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2110.465698] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquired lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2110.465843] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Forcefully refreshing network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2110.500926] env[63024]: DEBUG nova.compute.manager [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2110.528410] env[63024]: DEBUG nova.virt.hardware [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2110.528830] env[63024]: DEBUG nova.virt.hardware [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2110.529034] env[63024]: DEBUG nova.virt.hardware [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2110.529234] env[63024]: DEBUG nova.virt.hardware [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2110.529391] env[63024]: DEBUG nova.virt.hardware [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2110.529541] env[63024]: DEBUG nova.virt.hardware [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2110.529754] env[63024]: DEBUG nova.virt.hardware [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2110.529914] env[63024]: DEBUG nova.virt.hardware [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2110.530107] env[63024]: DEBUG nova.virt.hardware [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 
tempest-ServersTestJSON-2007614233-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2110.530278] env[63024]: DEBUG nova.virt.hardware [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2110.530448] env[63024]: DEBUG nova.virt.hardware [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2110.531319] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c7289c8-3be4-4c01-a2ec-a1285f6511df {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.539321] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-facd8d6a-dfbf-4c4b-a347-850400318d00 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.582774] env[63024]: DEBUG nova.compute.manager [req-37456905-b4b6-4425-8e77-f875a40cf2b9 req-9730c203-e7bd-4638-b9a2-ce5a45b73d99 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Received event network-changed-d9aaf9d8-7a03-4fe5-8494-40497e138b13 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2110.582974] env[63024]: DEBUG nova.compute.manager [req-37456905-b4b6-4425-8e77-f875a40cf2b9 req-9730c203-e7bd-4638-b9a2-ce5a45b73d99 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Refreshing instance network info cache due to event network-changed-d9aaf9d8-7a03-4fe5-8494-40497e138b13. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2110.583241] env[63024]: DEBUG oslo_concurrency.lockutils [req-37456905-b4b6-4425-8e77-f875a40cf2b9 req-9730c203-e7bd-4638-b9a2-ce5a45b73d99 service nova] Acquiring lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2110.583405] env[63024]: DEBUG oslo_concurrency.lockutils [req-37456905-b4b6-4425-8e77-f875a40cf2b9 req-9730c203-e7bd-4638-b9a2-ce5a45b73d99 service nova] Acquired lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2110.583637] env[63024]: DEBUG nova.network.neutron [req-37456905-b4b6-4425-8e77-f875a40cf2b9 req-9730c203-e7bd-4638-b9a2-ce5a45b73d99 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Refreshing network info cache for port d9aaf9d8-7a03-4fe5-8494-40497e138b13 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2110.730860] env[63024]: DEBUG oslo_vmware.api [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951762, 'name': ReconfigVM_Task, 'duration_secs': 5.863375} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.731669] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2110.731669] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Reconfigured VM to detach interface {{(pid=63024) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 2110.741467] env[63024]: DEBUG oslo_concurrency.lockutils [None req-73be4c4a-e9da-44e7-aa1e-975c255c95ac tempest-ServerPasswordTestJSON-405548590 tempest-ServerPasswordTestJSON-405548590-project-member] Lock "e4d6e79b-f110-44c2-8201-926b57eeb68d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.689s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2110.894689] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f64a01b-6ddd-4632-8381-0b9def783f34 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.903017] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212eb271-57e6-4fe4-aed7-66ff39a2b9ff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.938029] env[63024]: DEBUG oslo_concurrency.lockutils [None req-97d161a6-05d9-40c7-8977-e60dd9a91489 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "fa326fe2-c00e-4379-954a-9b3275328abc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.198s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2110.941324] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73b731b-edb3-462d-bb7a-2d9cad670520 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.952136] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eec01ca9-88a6-44f3-af19-ce91a13530cf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.955944] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951768, 'name': CreateVM_Task, 'duration_secs': 0.385955} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2110.956134] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2110.957160] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'attachment_id': 'f3d6596b-d713-4b9b-ad61-508b4e212d20', 'boot_index': 0, 'delete_on_termination': True, 'mount_device': '/dev/sda', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402248', 'volume_id': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'name': 'volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a483e6b5-a192-4cfe-be36-1ce0667f5697', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'serial': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910'}, 'device_type': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=63024) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2110.957379] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Root volume attach. 
Driver type: vmdk {{(pid=63024) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 2110.958131] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19ae51b-2e13-4eb8-9c02-605e8b37e9f4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.969955] env[63024]: DEBUG nova.compute.provider_tree [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2110.976721] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87943782-c4bb-476b-8412-2a778f90beea {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.983339] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704906a0-7194-4324-886b-5eec94f32fbd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.990333] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-22094919-4c0b-48bf-b1eb-f370b062f513 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2110.999383] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2110.999383] env[63024]: value = "task-1951769" [ 2110.999383] env[63024]: _type = "Task" [ 2110.999383] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.008574] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951769, 'name': RelocateVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.474108] env[63024]: DEBUG nova.scheduler.client.report [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2111.497906] env[63024]: DEBUG nova.network.neutron [req-37456905-b4b6-4425-8e77-f875a40cf2b9 req-9730c203-e7bd-4638-b9a2-ce5a45b73d99 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updated VIF entry in instance network info cache for port d9aaf9d8-7a03-4fe5-8494-40497e138b13. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2111.498287] env[63024]: DEBUG nova.network.neutron [req-37456905-b4b6-4425-8e77-f875a40cf2b9 req-9730c203-e7bd-4638-b9a2-ce5a45b73d99 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance_info_cache with network_info: [{"id": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "address": "fa:16:3e:37:15:c7", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9aaf9d8-7a", "ovs_interfaceid": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2111.514957] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951769, 'name': RelocateVM_Task, 'duration_secs': 0.348888} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2111.515268] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Volume attach. 
Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2111.515473] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402248', 'volume_id': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'name': 'volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a483e6b5-a192-4cfe-be36-1ce0667f5697', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'serial': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2111.516486] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0f87360-175f-4181-a37d-7b417e20d351 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.534675] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e014185-7dfb-474d-995a-3a11b68bc138 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.558035] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910/volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2111.560540] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b45887af-282a-436f-813a-50d0472c2cf5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2111.584802] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2111.584802] env[63024]: value = "task-1951770" [ 2111.584802] env[63024]: _type = "Task" [ 2111.584802] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2111.593760] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951770, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2111.694378] env[63024]: DEBUG nova.compute.manager [req-ffb14287-2b83-4cda-aeab-d3160893313b req-b5c5d43f-07fb-42b4-b90f-8381618d84fa service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Received event network-changed-a13f8ab3-d900-447f-8772-5be6b3d48296 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2111.694612] env[63024]: DEBUG nova.compute.manager [req-ffb14287-2b83-4cda-aeab-d3160893313b req-b5c5d43f-07fb-42b4-b90f-8381618d84fa service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Refreshing instance network info cache due to event network-changed-a13f8ab3-d900-447f-8772-5be6b3d48296. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2111.694840] env[63024]: DEBUG oslo_concurrency.lockutils [req-ffb14287-2b83-4cda-aeab-d3160893313b req-b5c5d43f-07fb-42b4-b90f-8381618d84fa service nova] Acquiring lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2111.694993] env[63024]: DEBUG oslo_concurrency.lockutils [req-ffb14287-2b83-4cda-aeab-d3160893313b req-b5c5d43f-07fb-42b4-b90f-8381618d84fa service nova] Acquired lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2111.695441] env[63024]: DEBUG nova.network.neutron [req-ffb14287-2b83-4cda-aeab-d3160893313b req-b5c5d43f-07fb-42b4-b90f-8381618d84fa service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Refreshing network info cache for port a13f8ab3-d900-447f-8772-5be6b3d48296 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2111.792876] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Updating instance_info_cache with network_info: [{"id": "209c547a-fef6-4e81-9221-59b72099faa5", "address": "fa:16:3e:f0:17:5b", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap209c547a-fe", "ovs_interfaceid": "209c547a-fef6-4e81-9221-59b72099faa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2111.979791] env[63024]: DEBUG oslo_concurrency.lockutils [None req-574165ee-2a9c-4a11-ace5-904298f406ce 
tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.768s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2111.982332] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.951s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2111.983954] env[63024]: INFO nova.compute.claims [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2112.001652] env[63024]: DEBUG oslo_concurrency.lockutils [req-37456905-b4b6-4425-8e77-f875a40cf2b9 req-9730c203-e7bd-4638-b9a2-ce5a45b73d99 service nova] Releasing lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2112.013589] env[63024]: INFO nova.scheduler.client.report [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted allocations for instance 881b1f35-206e-4c3f-bf7a-d1774a9343c2 [ 2112.100543] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951770, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.295785] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Releasing lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2112.296011] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Updated the network info_cache for instance {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10329}} [ 2112.296282] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.296452] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.296627] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.296776] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.296909] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.297438] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.297438] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 2112.297438] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2112.458351] env[63024]: DEBUG nova.network.neutron [req-ffb14287-2b83-4cda-aeab-d3160893313b req-b5c5d43f-07fb-42b4-b90f-8381618d84fa service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updated VIF entry in instance network info cache for port a13f8ab3-d900-447f-8772-5be6b3d48296. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2112.458766] env[63024]: DEBUG nova.network.neutron [req-ffb14287-2b83-4cda-aeab-d3160893313b req-b5c5d43f-07fb-42b4-b90f-8381618d84fa service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance_info_cache with network_info: [{"id": "a13f8ab3-d900-447f-8772-5be6b3d48296", "address": "fa:16:3e:47:7f:73", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa13f8ab3-d9", "ovs_interfaceid": "a13f8ab3-d900-447f-8772-5be6b3d48296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2112.523686] env[63024]: DEBUG oslo_concurrency.lockutils [None req-574165ee-2a9c-4a11-ace5-904298f406ce tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "881b1f35-206e-4c3f-bf7a-d1774a9343c2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.648s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.596803] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2112.596803] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquired lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2112.596803] env[63024]: DEBUG nova.network.neutron [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2112.600542] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951770, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2112.733122] env[63024]: DEBUG oslo_concurrency.lockutils [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "8826c266-659c-46ad-bb02-aefdffab8699" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.733416] env[63024]: DEBUG oslo_concurrency.lockutils [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "8826c266-659c-46ad-bb02-aefdffab8699" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.733747] env[63024]: DEBUG oslo_concurrency.lockutils [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "8826c266-659c-46ad-bb02-aefdffab8699-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.733944] env[63024]: DEBUG oslo_concurrency.lockutils [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "8826c266-659c-46ad-bb02-aefdffab8699-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2112.734168] env[63024]: DEBUG oslo_concurrency.lockutils [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "8826c266-659c-46ad-bb02-aefdffab8699-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2112.736723] env[63024]: INFO nova.compute.manager [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Terminating instance [ 2112.800230] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2112.962625] env[63024]: DEBUG oslo_concurrency.lockutils [req-ffb14287-2b83-4cda-aeab-d3160893313b req-b5c5d43f-07fb-42b4-b90f-8381618d84fa service nova] Releasing lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2113.100146] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 
tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951770, 'name': ReconfigVM_Task, 'duration_secs': 1.247768} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.100410] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Reconfigured VM instance instance-0000006f to attach disk [datastore1] volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910/volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2113.110145] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-11fae07a-3d1c-4aba-8e9b-88092c8b33ca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.127854] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2113.127854] env[63024]: value = "task-1951771" [ 2113.127854] env[63024]: _type = "Task" [ 2113.127854] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.136586] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951771, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.162071] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ca6867-bfb7-4bb8-a780-9b4615eb5fdc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.169841] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc67afb-ac02-4b89-8a31-c82638690b8a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.204083] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81943655-72df-4324-8dbb-899935c51f46 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.212828] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2f2d8b-0bb2-42e6-8238-14733f513148 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.229389] env[63024]: DEBUG nova.compute.provider_tree [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2113.241748] env[63024]: DEBUG nova.compute.manager [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2113.241985] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2113.243108] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d67942-831c-4f34-abda-2e002ea772be {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.254614] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2113.254812] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b47066e-9d9a-4ca1-a495-d58672d52798 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.262251] env[63024]: DEBUG oslo_vmware.api [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2113.262251] env[63024]: value = "task-1951772" [ 2113.262251] env[63024]: _type = "Task" [ 2113.262251] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.271475] env[63024]: DEBUG oslo_vmware.api [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951772, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.406695] env[63024]: INFO nova.network.neutron [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Port b0190de6-3c0b-430e-9952-40bdf36d8b58 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
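(Aside, not part of the captured log: the records around this point repeatedly show the same pattern — an oslo_vmware call such as ReconfigVM_Task or PowerOffVM_Task returns a task handle, and the caller logs "Waiting for the task", polls "progress is N%", and finally logs "completed successfully". Below is a minimal, self-contained Python sketch of that poll-until-complete pattern only. It does not use the real oslo.vmware or vSphere APIs; `FakeTask` and `wait_for_task` here are hypothetical stand-ins for illustration.)

```python
# Illustrative sketch of the "Waiting for the task ... progress is N%." /
# "completed successfully" polling loop seen in the log records above.
# FakeTask and wait_for_task are hypothetical; the real flow lives in
# oslo_vmware.api and nova.virt.vmwareapi.
import time


class FakeTask:
    """Stands in for a vCenter task handle (e.g. the 'task-1951770' above)."""

    def __init__(self, task_id, steps=3):
        self.task_id = task_id
        self._steps = steps
        self._polls = 0

    def poll(self):
        """Return (state, progress); this fake always ends in 'success'."""
        self._polls += 1
        if self._polls >= self._steps:
            return "success", 100
        return "running", int(100 * self._polls / self._steps)


def wait_for_task(task, interval=0.5):
    """Poll a task until it finishes, printing progress like the log records."""
    while True:
        state, progress = task.poll()
        print(f"Task: {{'id': {task.task_id!r}}} progress is {progress}%.")
        if state == "success":
            print(f"Task {task.task_id!r} completed successfully.")
            return
        if state == "error":
            raise RuntimeError(f"Task {task.task_id!r} failed")
        time.sleep(interval)


if __name__ == "__main__":
    # Example run: prints two progress records, then a completion record.
    wait_for_task(FakeTask("task-1951770"), interval=0.1)
```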
[ 2113.407412] env[63024]: DEBUG nova.network.neutron [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updating instance_info_cache with network_info: [{"id": "340baee8-fd68-482a-94ce-82df41470c62", "address": "fa:16:3e:0c:e1:92", "network": {"id": "ffb24eaf-c6b6-414f-a69a-0c8806712ddd", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-281590202-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9521048e807c4ca2a6d2e74a72b829a3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e445fb59-822c-4d7d-943b-c8e3bbaca62e", "external-id": "nsx-vlan-transportzone-258", "segmentation_id": 258, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap340baee8-fd", "ovs_interfaceid": "340baee8-fd68-482a-94ce-82df41470c62", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2113.639546] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951771, 'name': ReconfigVM_Task, 'duration_secs': 0.200363} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.639845] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402248', 'volume_id': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'name': 'volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a483e6b5-a192-4cfe-be36-1ce0667f5697', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'serial': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2113.640637] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-acd71642-4a29-4d47-9313-45ff12204598 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.649125] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2113.649125] env[63024]: value = "task-1951773" [ 2113.649125] env[63024]: _type = "Task" [ 2113.649125] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2113.660625] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951773, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2113.733608] env[63024]: DEBUG nova.scheduler.client.report [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2113.773015] env[63024]: DEBUG oslo_vmware.api [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951772, 'name': PowerOffVM_Task, 'duration_secs': 0.17659} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2113.773406] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2113.773725] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2113.774056] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-82d5674e-6fc5-40c5-860a-247df9279aaa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2113.910342] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Releasing lock "refresh_cache-8826c266-659c-46ad-bb02-aefdffab8699" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2114.160133] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951773, 'name': Rename_Task, 'duration_secs': 0.134759} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2114.160434] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2114.160715] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f33dc9f9-a855-4662-9e74-60f11b14e41a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.172621] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2114.172621] env[63024]: value = "task-1951775" [ 2114.172621] env[63024]: _type = "Task" [ 2114.172621] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2114.189524] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951775, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.240751] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.258s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.241324] env[63024]: DEBUG nova.compute.manager [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2114.244252] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.444s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2114.244357] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.244514] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2114.245576] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107d9eed-a3aa-4e6c-8019-1b5394cf0f9a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.254521] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a215b96-8286-459e-b5f7-1a6bb577664d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.269649] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08604632-b698-408b-a1ca-65d57f860066 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.278367] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f23f33-1427-4a7c-88f5-9d687ff2ed5c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2114.311406] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179552MB free_disk=170GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2114.311664] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock 
"compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2114.311787] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2114.414419] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e30a480c-f0cc-4e46-8bb1-0756e646ed2e tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "interface-8826c266-659c-46ad-bb02-aefdffab8699-b0190de6-3c0b-430e-9952-40bdf36d8b58" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.278s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2114.685057] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951775, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2114.747307] env[63024]: DEBUG nova.compute.utils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2114.748777] env[63024]: DEBUG nova.compute.manager [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2114.748947] env[63024]: DEBUG nova.network.neutron [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2114.798235] env[63024]: DEBUG nova.policy [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f215c99539cd43039ffdb0c6cf70beaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d0715f0ccbd49ec8af8e3049d970994', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2114.830189] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2114.830443] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2115.087762] env[63024]: DEBUG nova.network.neutron [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Successfully created port: 61d84146-ea46-4ba6-ab7a-7e81dec991d2 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2115.183859] env[63024]: DEBUG oslo_vmware.api [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951775, 'name': PowerOnVM_Task, 'duration_secs': 0.522534} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2115.184161] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2115.184362] env[63024]: INFO nova.compute.manager [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Took 5.27 seconds to spawn the instance on the hypervisor. [ 2115.184564] env[63024]: DEBUG nova.compute.manager [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2115.186044] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31a26fad-1f89-4fab-9d46-4dea2ec9525e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.251730] env[63024]: DEBUG nova.compute.manager [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2115.333733] env[63024]: DEBUG nova.compute.manager [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2115.346789] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e8ad74ce-7862-4574-98e7-14bc54bd5d6c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2115.346950] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance df2933d1-32c3-48a6-8ceb-d5e3047d0b78 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2115.347086] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 34e4db8e-e0d9-4a27-9368-c5e711b51a29 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2115.347207] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 8826c266-659c-46ad-bb02-aefdffab8699 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2115.347322] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 0d253199-adf8-45c0-a6bf-b11c12b08688 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2115.347434] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 5192ad93-a4e9-4aa0-983d-186ab17360f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2115.347543] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance cb038d54-b785-4930-b8a5-b309c5f4b58d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2115.347654] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance fa326fe2-c00e-4379-954a-9b3275328abc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2115.347762] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance a483e6b5-a192-4cfe-be36-1ce0667f5697 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2115.347876] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 3f350c3e-e9b3-4798-a424-fd32235d21cf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2115.347982] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 94893f45-fb96-463b-82a9-e2fd884b81f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2115.705060] env[63024]: INFO nova.compute.manager [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Took 11.81 seconds to build instance. [ 2115.851099] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance beefd67c-b791-4c19-822b-b0e21ec5f8ac has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2115.851363] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2115.851516] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2624MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2115.855500] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2115.983603] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-686561c7-9526-4eb4-9da3-030e3d03ab1c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2115.991763] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2efa7b2-b50e-456b-800f-a62c44bc9ec3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.022120] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d882e2d8-fb94-4120-937b-204a98a17c7d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.029939] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-035d2378-5b69-47b2-87bd-d4338914a676 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.043065] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2116.207195] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0dd45080-5d7a-491c-b5d1-03e33e95ff0b tempest-ServerActionsTestOtherA-322542459 
tempest-ServerActionsTestOtherA-322542459-project-member] Lock "a483e6b5-a192-4cfe-be36-1ce0667f5697" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.324s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2116.261877] env[63024]: DEBUG nova.compute.manager [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2116.288699] env[63024]: DEBUG nova.virt.hardware [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2116.288954] env[63024]: DEBUG nova.virt.hardware [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2116.289125] env[63024]: DEBUG nova.virt.hardware [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2116.289314] env[63024]: DEBUG nova.virt.hardware [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2116.289460] env[63024]: DEBUG nova.virt.hardware [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2116.289626] env[63024]: DEBUG nova.virt.hardware [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2116.289861] env[63024]: DEBUG nova.virt.hardware [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c 
tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2116.290035] env[63024]: DEBUG nova.virt.hardware [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2116.290212] env[63024]: DEBUG nova.virt.hardware [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2116.290374] env[63024]: DEBUG nova.virt.hardware [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2116.290553] env[63024]: DEBUG nova.virt.hardware [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2116.291409] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-607a58e0-af3e-466f-a6ab-afb539ea6420 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.300601] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afd89926-8507-495f-addb-fef4c3b9e403 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2116.546782] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2117.051223] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2117.051495] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.740s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.051771] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.196s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.053293] env[63024]: INFO nova.compute.claims [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2117.055837] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2117.055984] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Cleaning up deleted instances with incomplete migration {{(pid=63024) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11596}} [ 2117.086952] env[63024]: DEBUG nova.compute.manager [req-90d53bab-f7af-46d5-a005-5850ceb68ee6 req-52036ff4-1b83-4116-bc05-9ecc02d40966 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Received event network-changed-209c547a-fef6-4e81-9221-59b72099faa5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2117.086952] env[63024]: DEBUG nova.compute.manager [req-90d53bab-f7af-46d5-a005-5850ceb68ee6 req-52036ff4-1b83-4116-bc05-9ecc02d40966 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Refreshing instance network info cache due to event network-changed-209c547a-fef6-4e81-9221-59b72099faa5. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2117.087124] env[63024]: DEBUG oslo_concurrency.lockutils [req-90d53bab-f7af-46d5-a005-5850ceb68ee6 req-52036ff4-1b83-4116-bc05-9ecc02d40966 service nova] Acquiring lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2117.087266] env[63024]: DEBUG oslo_concurrency.lockutils [req-90d53bab-f7af-46d5-a005-5850ceb68ee6 req-52036ff4-1b83-4116-bc05-9ecc02d40966 service nova] Acquired lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2117.087421] env[63024]: DEBUG nova.network.neutron [req-90d53bab-f7af-46d5-a005-5850ceb68ee6 req-52036ff4-1b83-4116-bc05-9ecc02d40966 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Refreshing network info cache for port 209c547a-fef6-4e81-9221-59b72099faa5 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2117.305683] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2117.305935] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2117.306512] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Deleting the datastore file [datastore1] 8826c266-659c-46ad-bb02-aefdffab8699 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2117.306512] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c4cc65a3-adaf-47b0-987b-15887f381801 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2117.314880] env[63024]: DEBUG oslo_vmware.api [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2117.314880] env[63024]: value = "task-1951776" [ 2117.314880] env[63024]: _type = "Task" [ 2117.314880] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2117.323263] env[63024]: DEBUG oslo_vmware.api [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951776, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2117.560354] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2117.824962] env[63024]: DEBUG oslo_vmware.api [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951776, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.165672} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2117.828064] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2117.828286] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2117.828471] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2117.828643] env[63024]: INFO nova.compute.manager [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Took 4.59 seconds to destroy the instance on the hypervisor. [ 2117.828887] env[63024]: DEBUG oslo.service.loopingcall [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2117.829104] env[63024]: DEBUG nova.compute.manager [-] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2117.829197] env[63024]: DEBUG nova.network.neutron [-] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2117.914688] env[63024]: DEBUG nova.compute.manager [req-2252631f-df1a-484b-a224-9cdd82700729 req-6681269f-409b-4953-b143-7859b15bd434 service nova] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Received event network-vif-plugged-bd02ff9f-c94b-4697-90ee-516a789ffac6 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2117.914914] env[63024]: DEBUG oslo_concurrency.lockutils [req-2252631f-df1a-484b-a224-9cdd82700729 req-6681269f-409b-4953-b143-7859b15bd434 service nova] Acquiring lock "3f350c3e-e9b3-4798-a424-fd32235d21cf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2117.915143] env[63024]: DEBUG oslo_concurrency.lockutils [req-2252631f-df1a-484b-a224-9cdd82700729 req-6681269f-409b-4953-b143-7859b15bd434 service nova] Lock "3f350c3e-e9b3-4798-a424-fd32235d21cf-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2117.915318] env[63024]: DEBUG oslo_concurrency.lockutils [req-2252631f-df1a-484b-a224-9cdd82700729 req-6681269f-409b-4953-b143-7859b15bd434 service nova] Lock "3f350c3e-e9b3-4798-a424-fd32235d21cf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2117.915484] env[63024]: DEBUG nova.compute.manager [req-2252631f-df1a-484b-a224-9cdd82700729 req-6681269f-409b-4953-b143-7859b15bd434 service nova] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] No waiting events found dispatching network-vif-plugged-bd02ff9f-c94b-4697-90ee-516a789ffac6 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2117.915650] env[63024]: WARNING nova.compute.manager [req-2252631f-df1a-484b-a224-9cdd82700729 req-6681269f-409b-4953-b143-7859b15bd434 service nova] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Received unexpected event network-vif-plugged-bd02ff9f-c94b-4697-90ee-516a789ffac6 for instance with vm_state building and task_state spawning. [ 2118.052965] env[63024]: DEBUG nova.network.neutron [req-90d53bab-f7af-46d5-a005-5850ceb68ee6 req-52036ff4-1b83-4116-bc05-9ecc02d40966 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Updated VIF entry in instance network info cache for port 209c547a-fef6-4e81-9221-59b72099faa5. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2118.053368] env[63024]: DEBUG nova.network.neutron [req-90d53bab-f7af-46d5-a005-5850ceb68ee6 req-52036ff4-1b83-4116-bc05-9ecc02d40966 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Updating instance_info_cache with network_info: [{"id": "209c547a-fef6-4e81-9221-59b72099faa5", "address": "fa:16:3e:f0:17:5b", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap209c547a-fe", "ovs_interfaceid": "209c547a-fef6-4e81-9221-59b72099faa5", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2118.238378] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020d415d-b64a-4f5e-875f-9f0bfae34a60 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.248222] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1de4f91-24f4-4d8b-881b-e560b5267fff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.279689] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9937babf-c4ca-4328-b4b5-3453f13b73ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.288557] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d564af0-bf65-4d63-9346-9b5ec915497d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2118.298338] env[63024]: DEBUG nova.network.neutron [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Successfully updated port: bd02ff9f-c94b-4697-90ee-516a789ffac6 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2118.310937] env[63024]: DEBUG nova.compute.provider_tree [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2118.556581] env[63024]: DEBUG oslo_concurrency.lockutils [req-90d53bab-f7af-46d5-a005-5850ceb68ee6 
req-52036ff4-1b83-4116-bc05-9ecc02d40966 service nova] Releasing lock "refresh_cache-e8ad74ce-7862-4574-98e7-14bc54bd5d6c" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2118.575789] env[63024]: DEBUG nova.compute.manager [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Stashing vm_state: active {{(pid=63024) _prep_resize /opt/stack/nova/nova/compute/manager.py:5954}} [ 2118.672282] env[63024]: DEBUG nova.network.neutron [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Successfully updated port: 61d84146-ea46-4ba6-ab7a-7e81dec991d2 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2118.805610] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "refresh_cache-3f350c3e-e9b3-4798-a424-fd32235d21cf" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2118.805610] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "refresh_cache-3f350c3e-e9b3-4798-a424-fd32235d21cf" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2118.805610] env[63024]: DEBUG nova.network.neutron [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2118.817110] env[63024]: DEBUG nova.scheduler.client.report [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2119.097521] env[63024]: DEBUG oslo_concurrency.lockutils [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2119.114647] env[63024]: DEBUG nova.compute.manager [req-dbea91cf-084b-426f-bb99-78d6bb5b9d2e req-3a973aba-b4e9-4a0b-a7d4-58ea0a828b92 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Received event network-changed-d9aaf9d8-7a03-4fe5-8494-40497e138b13 {{(pid=63024) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11461}} [ 2119.114647] env[63024]: DEBUG nova.compute.manager [req-dbea91cf-084b-426f-bb99-78d6bb5b9d2e req-3a973aba-b4e9-4a0b-a7d4-58ea0a828b92 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Refreshing instance network info cache due to event network-changed-d9aaf9d8-7a03-4fe5-8494-40497e138b13. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2119.114647] env[63024]: DEBUG oslo_concurrency.lockutils [req-dbea91cf-084b-426f-bb99-78d6bb5b9d2e req-3a973aba-b4e9-4a0b-a7d4-58ea0a828b92 service nova] Acquiring lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2119.114647] env[63024]: DEBUG oslo_concurrency.lockutils [req-dbea91cf-084b-426f-bb99-78d6bb5b9d2e req-3a973aba-b4e9-4a0b-a7d4-58ea0a828b92 service nova] Acquired lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2119.114647] env[63024]: DEBUG nova.network.neutron [req-dbea91cf-084b-426f-bb99-78d6bb5b9d2e req-3a973aba-b4e9-4a0b-a7d4-58ea0a828b92 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Refreshing network info cache for port d9aaf9d8-7a03-4fe5-8494-40497e138b13 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2119.177413] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "refresh_cache-94893f45-fb96-463b-82a9-e2fd884b81f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2119.177413] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired lock "refresh_cache-94893f45-fb96-463b-82a9-e2fd884b81f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2119.177413] env[63024]: DEBUG nova.network.neutron [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2119.322025] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.270s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2119.322025] env[63024]: DEBUG nova.compute.manager [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2119.324578] env[63024]: DEBUG oslo_concurrency.lockutils [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.227s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2119.335025] env[63024]: DEBUG nova.network.neutron [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2119.501845] env[63024]: DEBUG nova.network.neutron [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Updating instance_info_cache with network_info: [{"id": "bd02ff9f-c94b-4697-90ee-516a789ffac6", "address": "fa:16:3e:62:3e:f4", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd02ff9f-c9", "ovs_interfaceid": "bd02ff9f-c94b-4697-90ee-516a789ffac6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.521595] env[63024]: DEBUG nova.network.neutron [-] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2119.828504] env[63024]: DEBUG nova.compute.utils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2119.833590] env[63024]: INFO nova.compute.claims [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2119.836789] env[63024]: DEBUG nova.compute.manager [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Allocating IP 
information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2119.836959] env[63024]: DEBUG nova.network.neutron [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2119.927456] env[63024]: DEBUG nova.network.neutron [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2119.997871] env[63024]: DEBUG nova.policy [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7c32db2d81e40c492c1362d8356a03c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93098ad83ae144bf90a12c97ec863c06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2120.004774] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "refresh_cache-3f350c3e-e9b3-4798-a424-fd32235d21cf" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2120.005107] env[63024]: DEBUG nova.compute.manager [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Instance network_info: |[{"id": "bd02ff9f-c94b-4697-90ee-516a789ffac6", "address": "fa:16:3e:62:3e:f4", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd02ff9f-c9", "ovs_interfaceid": "bd02ff9f-c94b-4697-90ee-516a789ffac6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2120.005567] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None 
req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:62:3e:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ec763be6-4041-4651-8fd7-3820cf0ab86d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd02ff9f-c94b-4697-90ee-516a789ffac6', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2120.013925] env[63024]: DEBUG oslo.service.loopingcall [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2120.014206] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2120.014509] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f661fc8d-f12e-4f63-af3a-650dacb3ab29 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.032197] env[63024]: INFO nova.compute.manager [-] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Took 2.20 seconds to deallocate network for instance. [ 2120.044138] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2120.044138] env[63024]: value = "task-1951777" [ 2120.044138] env[63024]: _type = "Task" [ 2120.044138] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.051474] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951777, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.084628] env[63024]: DEBUG nova.compute.manager [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Received event network-changed-bd02ff9f-c94b-4697-90ee-516a789ffac6 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2120.084931] env[63024]: DEBUG nova.compute.manager [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Refreshing instance network info cache due to event network-changed-bd02ff9f-c94b-4697-90ee-516a789ffac6. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2120.085069] env[63024]: DEBUG oslo_concurrency.lockutils [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] Acquiring lock "refresh_cache-3f350c3e-e9b3-4798-a424-fd32235d21cf" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.085222] env[63024]: DEBUG oslo_concurrency.lockutils [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] Acquired lock "refresh_cache-3f350c3e-e9b3-4798-a424-fd32235d21cf" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.085384] env[63024]: DEBUG nova.network.neutron [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Refreshing network info cache for port bd02ff9f-c94b-4697-90ee-516a789ffac6 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2120.327051] env[63024]: DEBUG nova.network.neutron [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Updating instance_info_cache with network_info: [{"id": "61d84146-ea46-4ba6-ab7a-7e81dec991d2", "address": "fa:16:3e:52:e4:c0", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61d84146-ea", "ovs_interfaceid": "61d84146-ea46-4ba6-ab7a-7e81dec991d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2120.337489] env[63024]: DEBUG nova.compute.manager [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2120.342556] env[63024]: INFO nova.compute.resource_tracker [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating resource usage from migration f50108e4-2aa2-4aaa-b88b-5fb59917d283 [ 2120.435204] env[63024]: DEBUG nova.network.neutron [req-dbea91cf-084b-426f-bb99-78d6bb5b9d2e req-3a973aba-b4e9-4a0b-a7d4-58ea0a828b92 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updated VIF entry in instance network info cache for port d9aaf9d8-7a03-4fe5-8494-40497e138b13. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2120.435581] env[63024]: DEBUG nova.network.neutron [req-dbea91cf-084b-426f-bb99-78d6bb5b9d2e req-3a973aba-b4e9-4a0b-a7d4-58ea0a828b92 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance_info_cache with network_info: [{"id": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "address": "fa:16:3e:37:15:c7", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9aaf9d8-7a", "ovs_interfaceid": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2120.529679] env[63024]: DEBUG nova.network.neutron [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Successfully created port: 5b820f6b-5e1e-4bf8-a434-55cbc9e7968d {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2120.537492] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d6e0618-33e4-4560-8cc7-207efe0abef0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.541376] env[63024]: DEBUG oslo_concurrency.lockutils [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2120.547997] env[63024]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f052f0-625f-405f-8040-cc9a9822b603 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.554118] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951777, 'name': CreateVM_Task, 'duration_secs': 0.399023} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.554605] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2120.555313] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.555481] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.555811] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2120.556126] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd5cb0bc-d076-4338-aa00-34eb58957c06 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.585752] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72c7a3eb-b1ff-46ec-af19-f2a917c8e119 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.591341] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2120.591341] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5276449c-9139-0a25-f535-af410001b3ae" [ 2120.591341] env[63024]: _type = "Task" [ 2120.591341] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.598509] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0b8edb-cf6d-426f-a6a6-34c1e531b026 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.605703] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5276449c-9139-0a25-f535-af410001b3ae, 'name': SearchDatastore_Task, 'duration_secs': 0.009601} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2120.606324] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2120.606557] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2120.606795] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2120.607093] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2120.607159] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2120.607407] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae46b87b-034a-42f5-ae67-cf51edbf24b3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.616676] env[63024]: DEBUG nova.compute.provider_tree [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2120.625443] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2120.625627] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2120.626595] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71031b45-3f10-4d79-8bf2-0d609e714e55 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.631930] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2120.631930] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529e4b3b-44d1-0a64-7b89-b8995503b256" [ 2120.631930] env[63024]: _type = "Task" [ 2120.631930] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.639794] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529e4b3b-44d1-0a64-7b89-b8995503b256, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.829928] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Releasing lock "refresh_cache-94893f45-fb96-463b-82a9-e2fd884b81f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2120.830292] env[63024]: DEBUG nova.compute.manager [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Instance network_info: |[{"id": "61d84146-ea46-4ba6-ab7a-7e81dec991d2", "address": "fa:16:3e:52:e4:c0", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61d84146-ea", "ovs_interfaceid": "61d84146-ea46-4ba6-ab7a-7e81dec991d2", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2120.830719] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:e4:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '98011432-48cc-4ffd-a5a8-b96d2ea4424a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '61d84146-ea46-4ba6-ab7a-7e81dec991d2', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2120.838540] env[63024]: DEBUG oslo.service.loopingcall [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2120.838772] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2120.839022] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75add25d-f710-47b9-b2d4-07391adebaa3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2120.864868] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2120.864868] env[63024]: value = "task-1951778" [ 2120.864868] env[63024]: _type = "Task" [ 2120.864868] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2120.873385] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951778, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2120.933883] env[63024]: DEBUG nova.network.neutron [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Updated VIF entry in instance network info cache for port bd02ff9f-c94b-4697-90ee-516a789ffac6. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2120.934343] env[63024]: DEBUG nova.network.neutron [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Updating instance_info_cache with network_info: [{"id": "bd02ff9f-c94b-4697-90ee-516a789ffac6", "address": "fa:16:3e:62:3e:f4", "network": {"id": "b800daa8-f0f9-4823-92b0-ff8d853cc0ff", "bridge": "br-int", "label": "tempest-ServersTestJSON-1406445940-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "18540818b60e4483963d14559bc5c38d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ec763be6-4041-4651-8fd7-3820cf0ab86d", "external-id": "nsx-vlan-transportzone-943", "segmentation_id": 943, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd02ff9f-c9", "ovs_interfaceid": "bd02ff9f-c94b-4697-90ee-516a789ffac6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2120.938106] env[63024]: DEBUG oslo_concurrency.lockutils [req-dbea91cf-084b-426f-bb99-78d6bb5b9d2e req-3a973aba-b4e9-4a0b-a7d4-58ea0a828b92 service nova] Releasing lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2121.119447] env[63024]: DEBUG nova.scheduler.client.report [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2121.143696] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529e4b3b-44d1-0a64-7b89-b8995503b256, 'name': SearchDatastore_Task, 'duration_secs': 0.008503} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.144684] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7dc166e8-609c-41b2-8a60-13c4cf65dec6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.150829] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2121.150829] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b61580-5845-bc49-ea81-e13533ee127e" [ 2121.150829] env[63024]: _type = "Task" [ 2121.150829] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.159929] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b61580-5845-bc49-ea81-e13533ee127e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.360237] env[63024]: DEBUG nova.compute.manager [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2121.374899] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951778, 'name': CreateVM_Task, 'duration_secs': 0.500667} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.375081] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2121.375742] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2121.375909] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2121.376236] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2121.376481] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6ecd2b1-e22f-49ac-930a-399a434073ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.382574] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2121.382574] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a8a36f-c903-39bc-8711-3186b6a571b3" [ 2121.382574] env[63024]: _type = "Task" [ 2121.382574] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.387358] env[63024]: DEBUG nova.virt.hardware [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2121.387580] env[63024]: DEBUG nova.virt.hardware [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2121.387814] env[63024]: DEBUG nova.virt.hardware [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2121.388056] env[63024]: DEBUG nova.virt.hardware [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2121.388215] env[63024]: DEBUG nova.virt.hardware [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2121.388361] env[63024]: DEBUG nova.virt.hardware [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2121.388564] env[63024]: DEBUG nova.virt.hardware [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2121.388724] env[63024]: DEBUG nova.virt.hardware [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2121.388889] env[63024]: DEBUG nova.virt.hardware [None 
req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2121.389059] env[63024]: DEBUG nova.virt.hardware [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2121.389237] env[63024]: DEBUG nova.virt.hardware [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2121.389938] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aecf5fce-2c7f-429f-b9ec-83a2db1e8e6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.397345] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a8a36f-c903-39bc-8711-3186b6a571b3, 'name': SearchDatastore_Task, 'duration_secs': 0.009543} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.399464] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2121.399550] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2121.399697] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2121.400897] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65bea73-a0d9-459d-89b4-5c4ff4681f7b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.436715] env[63024]: DEBUG oslo_concurrency.lockutils [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] Releasing lock "refresh_cache-3f350c3e-e9b3-4798-a424-fd32235d21cf" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2121.436941] env[63024]: DEBUG nova.compute.manager [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Received event network-vif-plugged-61d84146-ea46-4ba6-ab7a-7e81dec991d2 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2121.437147] env[63024]: DEBUG oslo_concurrency.lockutils [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] Acquiring lock "94893f45-fb96-463b-82a9-e2fd884b81f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2121.437349] env[63024]: DEBUG oslo_concurrency.lockutils [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2121.437512] env[63024]: DEBUG oslo_concurrency.lockutils [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2121.437677] env[63024]: DEBUG nova.compute.manager [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] No waiting events found dispatching network-vif-plugged-61d84146-ea46-4ba6-ab7a-7e81dec991d2 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2121.437840] env[63024]: WARNING nova.compute.manager [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Received unexpected event network-vif-plugged-61d84146-ea46-4ba6-ab7a-7e81dec991d2 for instance with vm_state building and task_state spawning. [ 2121.438011] env[63024]: DEBUG nova.compute.manager [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Received event network-changed-61d84146-ea46-4ba6-ab7a-7e81dec991d2 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2121.438170] env[63024]: DEBUG nova.compute.manager [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Refreshing instance network info cache due to event network-changed-61d84146-ea46-4ba6-ab7a-7e81dec991d2. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2121.438353] env[63024]: DEBUG oslo_concurrency.lockutils [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] Acquiring lock "refresh_cache-94893f45-fb96-463b-82a9-e2fd884b81f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2121.438490] env[63024]: DEBUG oslo_concurrency.lockutils [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] Acquired lock "refresh_cache-94893f45-fb96-463b-82a9-e2fd884b81f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2121.438706] env[63024]: DEBUG nova.network.neutron [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Refreshing network info cache for port 61d84146-ea46-4ba6-ab7a-7e81dec991d2 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2121.625759] env[63024]: DEBUG oslo_concurrency.lockutils [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.300s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2121.625759] env[63024]: INFO nova.compute.manager [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Migrating [ 2121.632537] env[63024]: DEBUG oslo_concurrency.lockutils [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.091s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2121.632797] env[63024]: DEBUG nova.objects.instance [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'resources' on Instance uuid 8826c266-659c-46ad-bb02-aefdffab8699 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2121.661668] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b61580-5845-bc49-ea81-e13533ee127e, 'name': SearchDatastore_Task, 'duration_secs': 0.011316} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2121.661957] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2121.662217] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 3f350c3e-e9b3-4798-a424-fd32235d21cf/3f350c3e-e9b3-4798-a424-fd32235d21cf.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2121.662495] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2121.662685] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2121.662904] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b320cf84-9a67-4e84-9bfe-d8d96cffb9c5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.664804] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ea987655-dc66-401f-a32f-6b3553744b75 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.673441] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2121.673441] env[63024]: value = "task-1951779" [ 2121.673441] env[63024]: _type = "Task" [ 2121.673441] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.677448] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2121.677626] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2121.678607] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb2b4c9c-2a7d-4d2a-b671-b67a4dedc9a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2121.683544] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951779, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2121.686680] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2121.686680] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522c6e8f-c606-7a4e-f2aa-6755379760d1" [ 2121.686680] env[63024]: _type = "Task" [ 2121.686680] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2121.695905] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522c6e8f-c606-7a4e-f2aa-6755379760d1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.143728] env[63024]: DEBUG oslo_concurrency.lockutils [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2122.143728] env[63024]: DEBUG oslo_concurrency.lockutils [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2122.143881] env[63024]: DEBUG nova.network.neutron [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2122.186882] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951779, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456494} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.187237] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 3f350c3e-e9b3-4798-a424-fd32235d21cf/3f350c3e-e9b3-4798-a424-fd32235d21cf.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2122.187448] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2122.190763] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-794a91bc-f911-4164-b0b4-7111386ec969 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.200757] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522c6e8f-c606-7a4e-f2aa-6755379760d1, 'name': SearchDatastore_Task, 'duration_secs': 0.008694} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.206448] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2122.206448] env[63024]: value = "task-1951780" [ 2122.206448] env[63024]: _type = "Task" [ 2122.206448] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.208987] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5807ab51-2a7e-42b5-8205-2b2503a4aa73 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.219392] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2122.219392] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fe14de-e66a-2883-5199-c80684537d57" [ 2122.219392] env[63024]: _type = "Task" [ 2122.219392] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.223348] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951780, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.237616] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fe14de-e66a-2883-5199-c80684537d57, 'name': SearchDatastore_Task, 'duration_secs': 0.009499} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.237616] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2122.237616] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 94893f45-fb96-463b-82a9-e2fd884b81f8/94893f45-fb96-463b-82a9-e2fd884b81f8.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2122.237616] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-56b4d856-e9e0-4ac9-9266-4827adf136ec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.244640] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2122.244640] env[63024]: value = "task-1951781" [ 2122.244640] env[63024]: _type = "Task" [ 2122.244640] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.256573] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951781, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.310299] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.310591] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.367880] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dce3df47-939f-42da-b629-f86e58f742ee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.376651] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23e06d4-4b02-484c-8cc2-26f822087573 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.413944] env[63024]: DEBUG nova.network.neutron [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Updated VIF entry in instance network info cache for port 61d84146-ea46-4ba6-ab7a-7e81dec991d2. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2122.414439] env[63024]: DEBUG nova.network.neutron [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Updating instance_info_cache with network_info: [{"id": "61d84146-ea46-4ba6-ab7a-7e81dec991d2", "address": "fa:16:3e:52:e4:c0", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61d84146-ea", "ovs_interfaceid": "61d84146-ea46-4ba6-ab7a-7e81dec991d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2122.416595] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be4a289-234c-4d55-8125-a0756c3114e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.427854] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60d17894-b36b-48f4-b334-5a62a3bfa0c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.445812] env[63024]: DEBUG nova.compute.provider_tree [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2122.492397] env[63024]: DEBUG nova.compute.manager [req-9e4647a4-3a8f-478c-8dad-0b5d16b1f4d6 req-753333a2-8b34-4237-89c8-e931719149a7 service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Received event network-vif-plugged-5b820f6b-5e1e-4bf8-a434-55cbc9e7968d {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2122.492639] env[63024]: DEBUG oslo_concurrency.lockutils [req-9e4647a4-3a8f-478c-8dad-0b5d16b1f4d6 req-753333a2-8b34-4237-89c8-e931719149a7 service nova] Acquiring lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2122.492935] env[63024]: DEBUG oslo_concurrency.lockutils [req-9e4647a4-3a8f-478c-8dad-0b5d16b1f4d6 req-753333a2-8b34-4237-89c8-e931719149a7 service nova] Lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2122.493445] env[63024]: DEBUG oslo_concurrency.lockutils [req-9e4647a4-3a8f-478c-8dad-0b5d16b1f4d6 req-753333a2-8b34-4237-89c8-e931719149a7 service nova] Lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2122.493593] env[63024]: DEBUG nova.compute.manager [req-9e4647a4-3a8f-478c-8dad-0b5d16b1f4d6 req-753333a2-8b34-4237-89c8-e931719149a7 service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] No waiting events found dispatching network-vif-plugged-5b820f6b-5e1e-4bf8-a434-55cbc9e7968d {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2122.493989] env[63024]: WARNING nova.compute.manager [req-9e4647a4-3a8f-478c-8dad-0b5d16b1f4d6 req-753333a2-8b34-4237-89c8-e931719149a7 service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Received unexpected event network-vif-plugged-5b820f6b-5e1e-4bf8-a434-55cbc9e7968d for instance with vm_state building and task_state spawning. [ 2122.723321] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951780, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064764} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.723618] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2122.724532] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74d1b61c-2eaf-4667-84cf-2749ff189ea1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.746068] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Reconfiguring VM instance instance-00000070 to attach disk [datastore1] 3f350c3e-e9b3-4798-a424-fd32235d21cf/3f350c3e-e9b3-4798-a424-fd32235d21cf.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2122.746356] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-539ef5f3-079a-4da5-af24-2d50c0c338e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.772249] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951781, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.461263} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2122.773541] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 94893f45-fb96-463b-82a9-e2fd884b81f8/94893f45-fb96-463b-82a9-e2fd884b81f8.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2122.773774] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2122.774140] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2122.774140] env[63024]: value = "task-1951782" [ 2122.774140] env[63024]: _type = "Task" [ 2122.774140] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.774336] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-56dc8ff6-e149-4357-bf1a-60aab87d56ad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2122.784790] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951782, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.786205] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2122.786205] env[63024]: value = "task-1951783" [ 2122.786205] env[63024]: _type = "Task" [ 2122.786205] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2122.799481] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951783, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2122.813836] env[63024]: DEBUG nova.compute.utils [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2122.921199] env[63024]: DEBUG oslo_concurrency.lockutils [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] Releasing lock "refresh_cache-94893f45-fb96-463b-82a9-e2fd884b81f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2122.921570] env[63024]: DEBUG nova.compute.manager [req-267676a6-f29e-442e-9fe3-5d8fdd56e447 req-e4569cfa-800e-4bdb-9812-d09cefd56e2b service nova] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Received event network-vif-deleted-340baee8-fd68-482a-94ce-82df41470c62 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2122.949177] env[63024]: DEBUG nova.scheduler.client.report [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2122.958119] env[63024]: DEBUG nova.network.neutron [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance_info_cache with network_info: [{"id": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "address": "fa:16:3e:37:15:c7", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9aaf9d8-7a", "ovs_interfaceid": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2122.965160] env[63024]: DEBUG 
nova.network.neutron [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Successfully updated port: 5b820f6b-5e1e-4bf8-a434-55cbc9e7968d {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2122.989576] env[63024]: DEBUG nova.compute.manager [req-ee6771ab-a451-4cc4-9af3-00e3b75c7a22 req-c0065c59-0db2-4de3-9a0a-c0fad10eaafa service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Received event network-changed-5b820f6b-5e1e-4bf8-a434-55cbc9e7968d {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2122.989785] env[63024]: DEBUG nova.compute.manager [req-ee6771ab-a451-4cc4-9af3-00e3b75c7a22 req-c0065c59-0db2-4de3-9a0a-c0fad10eaafa service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Refreshing instance network info cache due to event network-changed-5b820f6b-5e1e-4bf8-a434-55cbc9e7968d. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2122.990008] env[63024]: DEBUG oslo_concurrency.lockutils [req-ee6771ab-a451-4cc4-9af3-00e3b75c7a22 req-c0065c59-0db2-4de3-9a0a-c0fad10eaafa service nova] Acquiring lock "refresh_cache-beefd67c-b791-4c19-822b-b0e21ec5f8ac" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2122.990804] env[63024]: DEBUG oslo_concurrency.lockutils [req-ee6771ab-a451-4cc4-9af3-00e3b75c7a22 req-c0065c59-0db2-4de3-9a0a-c0fad10eaafa service nova] Acquired lock "refresh_cache-beefd67c-b791-4c19-822b-b0e21ec5f8ac" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2122.991009] env[63024]: DEBUG nova.network.neutron [req-ee6771ab-a451-4cc4-9af3-00e3b75c7a22 req-c0065c59-0db2-4de3-9a0a-c0fad10eaafa service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Refreshing network info cache for port 5b820f6b-5e1e-4bf8-a434-55cbc9e7968d {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2123.287053] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951782, 'name': ReconfigVM_Task, 'duration_secs': 0.29207} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.287220] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Reconfigured VM instance instance-00000070 to attach disk [datastore1] 3f350c3e-e9b3-4798-a424-fd32235d21cf/3f350c3e-e9b3-4798-a424-fd32235d21cf.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2123.288112] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7d61959-d1b8-473c-ac62-92059e0f81e2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.297585] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951783, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.177673} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.298785] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2123.299135] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2123.299135] env[63024]: value = "task-1951784" [ 2123.299135] env[63024]: _type = "Task" [ 2123.299135] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.299794] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32de578e-a5de-47d8-982f-526eede7ef79 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.316608] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.326293] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] 94893f45-fb96-463b-82a9-e2fd884b81f8/94893f45-fb96-463b-82a9-e2fd884b81f8.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2123.329597] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4f5b3af-57e0-43c8-a9ef-d2f6547b2e7e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.344248] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951784, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.353357] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2123.353357] env[63024]: value = "task-1951785" [ 2123.353357] env[63024]: _type = "Task" [ 2123.353357] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.362737] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951785, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.454926] env[63024]: DEBUG oslo_concurrency.lockutils [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.822s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2123.460802] env[63024]: DEBUG oslo_concurrency.lockutils [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2123.467846] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "refresh_cache-beefd67c-b791-4c19-822b-b0e21ec5f8ac" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2123.476368] env[63024]: INFO nova.scheduler.client.report [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Deleted allocations for instance 8826c266-659c-46ad-bb02-aefdffab8699 [ 2123.540591] env[63024]: DEBUG nova.network.neutron [req-ee6771ab-a451-4cc4-9af3-00e3b75c7a22 req-c0065c59-0db2-4de3-9a0a-c0fad10eaafa service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2123.641832] env[63024]: DEBUG nova.network.neutron [req-ee6771ab-a451-4cc4-9af3-00e3b75c7a22 req-c0065c59-0db2-4de3-9a0a-c0fad10eaafa service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2123.812935] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951784, 'name': Rename_Task, 'duration_secs': 0.158349} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2123.813237] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2123.813480] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c0e3f4e-9251-4818-b999-a4854b2bba56 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2123.820953] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2123.820953] env[63024]: value = "task-1951786" [ 2123.820953] env[63024]: _type = "Task" [ 2123.820953] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2123.830540] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951786, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.864308] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951785, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2123.983578] env[63024]: DEBUG oslo_concurrency.lockutils [None req-345d0855-494a-4079-b4ed-71d4166b462c tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "8826c266-659c-46ad-bb02-aefdffab8699" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.250s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.145376] env[63024]: DEBUG oslo_concurrency.lockutils [req-ee6771ab-a451-4cc4-9af3-00e3b75c7a22 req-c0065c59-0db2-4de3-9a0a-c0fad10eaafa service nova] Releasing lock "refresh_cache-beefd67c-b791-4c19-822b-b0e21ec5f8ac" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2124.145790] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "refresh_cache-beefd67c-b791-4c19-822b-b0e21ec5f8ac" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2124.146024] env[63024]: DEBUG nova.network.neutron [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2124.205363] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.205586] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.205801] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.206035] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.206256] env[63024]: DEBUG 
oslo_concurrency.lockutils [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2124.208305] env[63024]: INFO nova.compute.manager [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Terminating instance [ 2124.331472] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951786, 'name': PowerOnVM_Task} progress is 87%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.363473] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951785, 'name': ReconfigVM_Task, 'duration_secs': 0.967353} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.364186] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Reconfigured VM instance instance-00000071 to attach disk [datastore1] 94893f45-fb96-463b-82a9-e2fd884b81f8/94893f45-fb96-463b-82a9-e2fd884b81f8.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2124.364555] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-edbb21f0-33ab-41ea-832f-0c9709fcc8ed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.372479] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2124.372479] env[63024]: value = "task-1951787" [ 2124.372479] env[63024]: _type = "Task" [ 2124.372479] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.381238] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2124.381482] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2124.381724] env[63024]: INFO nova.compute.manager [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Attaching volume fd669326-4ffa-49a6-9107-74d2b957b5f4 to /dev/sdb [ 2124.383402] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951787, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.417338] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7d8da5-ef7e-46f3-bd46-1f52f142f104 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.425413] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ad8397-0c4e-4213-8e05-0574372ec3e6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.444021] env[63024]: DEBUG nova.virt.block_device [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Updating existing volume attachment record: 1b8fae00-ff17-4e2b-9b37-1e7cc5eded14 {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2124.693287] env[63024]: DEBUG nova.network.neutron [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2124.712374] env[63024]: DEBUG nova.compute.manager [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2124.712696] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2124.713489] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b230d41f-f96b-43f0-8fd8-7f2fa165b047 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.721306] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2124.721561] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de428d8a-38f2-4c1f-b674-8d56733c245c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.728397] env[63024]: DEBUG oslo_vmware.api [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2124.728397] env[63024]: value = "task-1951789" [ 2124.728397] env[63024]: _type = "Task" [ 2124.728397] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.737403] env[63024]: DEBUG oslo_vmware.api [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951789, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.835217] env[63024]: DEBUG oslo_vmware.api [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951786, 'name': PowerOnVM_Task, 'duration_secs': 0.61463} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.835905] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2124.835905] env[63024]: INFO nova.compute.manager [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Took 14.33 seconds to spawn the instance on the hypervisor. 
[ 2124.835905] env[63024]: DEBUG nova.compute.manager [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2124.836714] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e14b69a0-82ab-47d6-8490-b55ba7771cb2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.873037] env[63024]: DEBUG nova.network.neutron [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Updating instance_info_cache with network_info: [{"id": "5b820f6b-5e1e-4bf8-a434-55cbc9e7968d", "address": "fa:16:3e:6d:fd:6a", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b820f6b-5e", "ovs_interfaceid": "5b820f6b-5e1e-4bf8-a434-55cbc9e7968d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2124.887503] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951787, 'name': Rename_Task, 'duration_secs': 0.151987} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2124.889256] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2124.889456] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a91842f6-54bf-4606-80a8-de0aae9f0eaa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.898712] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2124.898712] env[63024]: value = "task-1951790" [ 2124.898712] env[63024]: _type = "Task" [ 2124.898712] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2124.909922] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951790, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2124.977166] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9bad1e5-1d8a-4567-9c93-4085aec15710 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2124.996606] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance 'a483e6b5-a192-4cfe-be36-1ce0667f5697' progress to 0 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2125.239033] env[63024]: DEBUG oslo_vmware.api [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951789, 'name': PowerOffVM_Task, 'duration_secs': 0.212555} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.239033] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2125.239216] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2125.239321] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b31482ff-e30e-4de8-ac6c-fa869a8cc5d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.359261] env[63024]: INFO nova.compute.manager [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Took 21.12 seconds to build instance. 
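Throughout this section the compute manager serializes work per instance and per resource (locks such as "refresh_cache-<uuid>", "compute_resources", or the instance UUID itself) with oslo.concurrency, which produces the "Acquiring lock … by …", "acquired … waited 0.000s" and "released … held Ns" records seen here. A minimal sketch of that usage follows; the lock name and the guarded function are invented for illustration, and only the `lockutils.synchronized` decorator is the real oslo.concurrency entry point (its exact keyword arguments should be checked against the installed version).

```python
from oslo_concurrency import lockutils


# Assumed lock name, mimicking the per-instance locks in the log
# ("refresh_cache-<instance uuid>"). Callers in this process that use the
# same name are serialized, and the wait/held durations are logged much
# like the records above.
@lockutils.synchronized('refresh_cache-5192ad93-a4e9-4aa0-983d-186ab17360f0')
def refresh_instance_cache(instance_uuid):
    # Placeholder body: in Nova this is roughly where the instance's
    # network info cache would be rebuilt while the lock is held.
    return f"refreshed cache for {instance_uuid}"


if __name__ == "__main__":
    print(refresh_instance_cache('5192ad93-a4e9-4aa0-983d-186ab17360f0'))
```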
[ 2125.374756] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "refresh_cache-beefd67c-b791-4c19-822b-b0e21ec5f8ac" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2125.375081] env[63024]: DEBUG nova.compute.manager [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Instance network_info: |[{"id": "5b820f6b-5e1e-4bf8-a434-55cbc9e7968d", "address": "fa:16:3e:6d:fd:6a", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b820f6b-5e", "ovs_interfaceid": "5b820f6b-5e1e-4bf8-a434-55cbc9e7968d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2125.375542] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:fd:6a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e66c4ebe-f808-4b34-bdb5-6c45edb1736f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5b820f6b-5e1e-4bf8-a434-55cbc9e7968d', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2125.382923] env[63024]: DEBUG oslo.service.loopingcall [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2125.383456] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2125.383729] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a5aba33f-6dba-4070-9561-45e8ec168247 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.405578] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2125.405578] env[63024]: value = "task-1951792" [ 2125.405578] env[63024]: _type = "Task" [ 2125.405578] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.408812] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951790, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.417333] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951792, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.502995] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2125.503841] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58df057c-d324-4d09-bcee-ee1d4e0af1c3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.513635] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2125.513635] env[63024]: value = "task-1951793" [ 2125.513635] env[63024]: _type = "Task" [ 2125.513635] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.522436] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951793, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.571924] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2125.572173] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2125.572372] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Deleting the datastore file [datastore1] 34e4db8e-e0d9-4a27-9368-c5e711b51a29 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2125.572675] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-142cab26-f7dc-4854-bd80-a95c55a6c972 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.585132] env[63024]: DEBUG oslo_vmware.api [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for the task: (returnval){ [ 2125.585132] env[63024]: value = "task-1951794" [ 2125.585132] env[63024]: _type = "Task" [ 2125.585132] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.592315] env[63024]: DEBUG oslo_vmware.api [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951794, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.861427] env[63024]: DEBUG oslo_concurrency.lockutils [None req-40719ae6-63f4-468c-8f71-9c55043af4db tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "3f350c3e-e9b3-4798-a424-fd32235d21cf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.632s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2125.918230] env[63024]: DEBUG oslo_vmware.api [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951790, 'name': PowerOnVM_Task, 'duration_secs': 1.007106} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.919027] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2125.919322] env[63024]: INFO nova.compute.manager [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Took 9.66 seconds to spawn the instance on the hypervisor. [ 2125.919593] env[63024]: DEBUG nova.compute.manager [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2125.920683] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb87ebc7-5fe1-4364-b0dd-4a8fa1fcf87d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.928349] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951792, 'name': CreateVM_Task, 'duration_secs': 0.369281} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2125.928999] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2125.929884] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2125.930135] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2125.930579] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2125.930914] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-50da1bc0-c12e-4d2f-9333-d3bc939fe9a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.942937] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the 
task: (returnval){ [ 2125.942937] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dfc4d7-827c-954a-c0e1-e9abf49bbe16" [ 2125.942937] env[63024]: _type = "Task" [ 2125.942937] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2125.954832] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dfc4d7-827c-954a-c0e1-e9abf49bbe16, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2125.972481] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7e60e61f-6a53-4ed1-bbcc-abdc08f43c09 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "3f350c3e-e9b3-4798-a424-fd32235d21cf" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2125.972699] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7e60e61f-6a53-4ed1-bbcc-abdc08f43c09 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "3f350c3e-e9b3-4798-a424-fd32235d21cf" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2125.973075] env[63024]: DEBUG nova.compute.manager [None req-7e60e61f-6a53-4ed1-bbcc-abdc08f43c09 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2125.975022] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee839ca7-b277-496a-8978-0a842d4a553b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2125.982314] env[63024]: DEBUG nova.compute.manager [None req-7e60e61f-6a53-4ed1-bbcc-abdc08f43c09 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63024) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2125.982886] env[63024]: DEBUG nova.objects.instance [None req-7e60e61f-6a53-4ed1-bbcc-abdc08f43c09 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lazy-loading 'flavor' on Instance uuid 3f350c3e-e9b3-4798-a424-fd32235d21cf {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2126.022899] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951793, 'name': PowerOffVM_Task, 'duration_secs': 0.227493} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.023182] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2126.023367] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance 'a483e6b5-a192-4cfe-be36-1ce0667f5697' progress to 17 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2126.093797] env[63024]: DEBUG oslo_vmware.api [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Task: {'id': task-1951794, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.107252} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.094078] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2126.094275] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2126.094458] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2126.094643] env[63024]: INFO nova.compute.manager [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Took 1.38 seconds to destroy the instance on the hypervisor. [ 2126.094922] env[63024]: DEBUG oslo.service.loopingcall [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2126.095140] env[63024]: DEBUG nova.compute.manager [-] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2126.095237] env[63024]: DEBUG nova.network.neutron [-] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2126.330026] env[63024]: DEBUG nova.compute.manager [req-3cb252a7-9414-443b-ad21-4e9bf48fdc08 req-1b2d5710-50fa-49e0-ac52-ae7228c822eb service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Received event network-vif-deleted-041c096f-ef1b-49ad-aadb-469b89fe4f25 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2126.330084] env[63024]: INFO nova.compute.manager [req-3cb252a7-9414-443b-ad21-4e9bf48fdc08 req-1b2d5710-50fa-49e0-ac52-ae7228c822eb service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Neutron deleted interface 041c096f-ef1b-49ad-aadb-469b89fe4f25; detaching it from the instance and deleting it from the info cache [ 2126.330274] env[63024]: DEBUG nova.network.neutron [req-3cb252a7-9414-443b-ad21-4e9bf48fdc08 req-1b2d5710-50fa-49e0-ac52-ae7228c822eb service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2126.449737] env[63024]: INFO nova.compute.manager [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Took 19.44 seconds to build instance. [ 2126.455520] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dfc4d7-827c-954a-c0e1-e9abf49bbe16, 'name': SearchDatastore_Task, 'duration_secs': 0.018449} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.455771] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2126.455977] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2126.456228] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2126.456374] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2126.456550] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2126.456853] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f9443cb-6a92-4a8d-8578-abcb04e3d3d1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.465934] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2126.466141] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2126.466826] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9b81a5a-6576-4c17-873d-22985771f548 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.472470] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2126.472470] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52070bad-5890-7954-6bd9-09e5aa08a8b7" [ 2126.472470] env[63024]: _type = "Task" [ 2126.472470] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.480862] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52070bad-5890-7954-6bd9-09e5aa08a8b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.530191] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2126.530459] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2126.530620] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2126.530806] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2126.530953] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2126.531140] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e 
tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2126.531355] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2126.531518] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2126.531759] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2126.532162] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2126.532468] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2126.538419] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ac348c5-9c44-4e8b-b891-481d58903d91 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.556372] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2126.556372] env[63024]: value = "task-1951795" [ 2126.556372] env[63024]: _type = "Task" [ 2126.556372] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2126.565017] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951795, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2126.809033] env[63024]: DEBUG nova.network.neutron [-] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2126.832526] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29897216-6094-4c51-88b2-5e4673e561ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.843663] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1297e821-ff30-4f21-bd3f-54bc3b0381fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.883913] env[63024]: DEBUG nova.compute.manager [req-3cb252a7-9414-443b-ad21-4e9bf48fdc08 req-1b2d5710-50fa-49e0-ac52-ae7228c822eb service nova] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Detach interface failed, port_id=041c096f-ef1b-49ad-aadb-469b89fe4f25, reason: Instance 34e4db8e-e0d9-4a27-9368-c5e711b51a29 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2126.951506] env[63024]: DEBUG oslo_concurrency.lockutils [None req-09de8c72-4d7e-44a7-875a-85b11df5ee5c tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.946s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2126.985387] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52070bad-5890-7954-6bd9-09e5aa08a8b7, 'name': SearchDatastore_Task, 'duration_secs': 0.008696} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2126.986215] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0cbade9f-9ebc-4718-a5da-bd8b9c0396b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.991792] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e60e61f-6a53-4ed1-bbcc-abdc08f43c09 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2126.992146] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eb159b49-f07c-4c3d-9233-05d1b3421e57 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2126.996043] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2126.996043] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ebc9b1-9122-da63-3360-21cc3827cc1a" [ 2126.996043] env[63024]: _type = "Task" [ 2126.996043] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.003011] env[63024]: DEBUG oslo_vmware.api [None req-7e60e61f-6a53-4ed1-bbcc-abdc08f43c09 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2127.003011] env[63024]: value = "task-1951797" [ 2127.003011] env[63024]: _type = "Task" [ 2127.003011] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.010620] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ebc9b1-9122-da63-3360-21cc3827cc1a, 'name': SearchDatastore_Task, 'duration_secs': 0.011367} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.011289] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2127.011565] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] beefd67c-b791-4c19-822b-b0e21ec5f8ac/beefd67c-b791-4c19-822b-b0e21ec5f8ac.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2127.011856] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1daa74ee-28ea-4091-9a8d-049494fbe31b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.016857] env[63024]: DEBUG oslo_vmware.api [None req-7e60e61f-6a53-4ed1-bbcc-abdc08f43c09 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951797, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.023187] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2127.023187] env[63024]: value = "task-1951798" [ 2127.023187] env[63024]: _type = "Task" [ 2127.023187] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.031838] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951798, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.067399] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951795, 'name': ReconfigVM_Task, 'duration_secs': 0.213288} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.067729] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance 'a483e6b5-a192-4cfe-be36-1ce0667f5697' progress to 33 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2127.313658] env[63024]: INFO nova.compute.manager [-] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Took 1.22 seconds to deallocate network for instance. 
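The surrounding records trace the spawn path for instance beefd67c-b791-4c19-822b-b0e21ec5f8ac: the cached image VMDK is copied from devstack-image-cache_base into the instance directory (CopyVirtualDisk_Task), the copy's root disk is extended to the flavor's root size (ExtendVirtualDisk_Task, "Extending root virtual disk to 1048576" KB, i.e. 1 GB), and the disk is attached to the VM via a reconfigure (ReconfigVM_Task). The sketch below only illustrates that ordering; every helper in it (`copy_virtual_disk`, `extend_virtual_disk`, `attach_disk`) is a hypothetical stand-in, not the nova.virt.vmwareapi API.

```python
# Minimal sketch of the copy -> extend -> attach ordering seen in the log.
# All helpers are assumed placeholders for the vCenter tasks the driver
# actually submits and waits on.

def copy_virtual_disk(src_path: str, dst_path: str) -> None:
    print(f"copy {src_path} -> {dst_path}")            # CopyVirtualDisk_Task


def extend_virtual_disk(path: str, new_size_kb: int) -> None:
    print(f"extend {path} to {new_size_kb} KB")        # ExtendVirtualDisk_Task


def attach_disk(vm_name: str, path: str) -> None:
    print(f"reconfigure {vm_name} to attach {path}")   # ReconfigVM_Task


def spawn_root_disk_from_cache(image_id: str, instance_uuid: str,
                               root_gb: int, datastore: str = "datastore1") -> None:
    cached = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    target = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"
    copy_virtual_disk(cached, target)
    extend_virtual_disk(target, root_gb * 1024 * 1024)  # GB -> KB, e.g. 1 GB -> 1048576
    attach_disk(instance_uuid, target)


if __name__ == "__main__":
    spawn_root_disk_from_cache("2646ca61-612e-4bc3-97f7-ee492c048835",
                               "beefd67c-b791-4c19-822b-b0e21ec5f8ac", root_gb=1)
```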
[ 2127.514659] env[63024]: DEBUG oslo_vmware.api [None req-7e60e61f-6a53-4ed1-bbcc-abdc08f43c09 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951797, 'name': PowerOffVM_Task, 'duration_secs': 0.223127} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.514949] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e60e61f-6a53-4ed1-bbcc-abdc08f43c09 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2127.515224] env[63024]: DEBUG nova.compute.manager [None req-7e60e61f-6a53-4ed1-bbcc-abdc08f43c09 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2127.515932] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b78355-98c6-4ede-81f1-c3c52fc9a717 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.531693] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951798, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487524} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2127.531932] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] beefd67c-b791-4c19-822b-b0e21ec5f8ac/beefd67c-b791-4c19-822b-b0e21ec5f8ac.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2127.532184] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2127.532515] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab63faa0-87ce-48a4-8845-ddcbd6e721b5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.539673] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2127.539673] env[63024]: value = "task-1951799" [ 2127.539673] env[63024]: _type = "Task" [ 2127.539673] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.550516] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951799, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.574769] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2127.575315] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2127.575315] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2127.575668] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2127.575712] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2127.575884] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2127.576207] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2127.576280] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2127.576461] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2127.576657] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2127.576869] env[63024]: DEBUG nova.virt.hardware [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2127.582184] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2127.582690] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ecefc351-a623-42cb-b594-cf1016cf1be7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2127.602506] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2127.602506] env[63024]: value = "task-1951800" [ 2127.602506] env[63024]: _type = "Task" [ 2127.602506] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2127.611822] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951800, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2127.824566] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2127.824837] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2127.825094] env[63024]: DEBUG nova.objects.instance [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lazy-loading 'resources' on Instance uuid 34e4db8e-e0d9-4a27-9368-c5e711b51a29 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2128.030141] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7e60e61f-6a53-4ed1-bbcc-abdc08f43c09 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "3f350c3e-e9b3-4798-a424-fd32235d21cf" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.057s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.050302] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951799, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.103535} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.050575] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2128.051335] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f462d509-8ed5-4810-9f76-7141f6353c1b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.075048] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Reconfiguring VM instance instance-00000072 to attach disk [datastore1] beefd67c-b791-4c19-822b-b0e21ec5f8ac/beefd67c-b791-4c19-822b-b0e21ec5f8ac.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2128.075598] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46128ce1-591b-4b45-ac64-0e7d9b6505fd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.095710] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2128.095710] env[63024]: value = "task-1951801" [ 2128.095710] env[63024]: _type = "Task" [ 2128.095710] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.105140] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951801, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.112087] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951800, 'name': ReconfigVM_Task, 'duration_secs': 0.420602} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.112368] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2128.113137] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6477cb0a-1dff-4743-b7de-33197855f32e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.135371] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910/volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2128.135688] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-04b3603e-ade0-4316-b43c-ec69d9dfbe2f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.154304] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2128.154304] env[63024]: value = "task-1951802" [ 2128.154304] env[63024]: _type = "Task" [ 2128.154304] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.162205] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951802, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.355876] env[63024]: DEBUG nova.compute.manager [req-132cadcc-4d0f-432a-9a54-f2046744ef48 req-4f4e00db-03af-47a6-88ff-3573cde55cd5 service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Received event network-changed-61d84146-ea46-4ba6-ab7a-7e81dec991d2 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2128.356100] env[63024]: DEBUG nova.compute.manager [req-132cadcc-4d0f-432a-9a54-f2046744ef48 req-4f4e00db-03af-47a6-88ff-3573cde55cd5 service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Refreshing instance network info cache due to event network-changed-61d84146-ea46-4ba6-ab7a-7e81dec991d2. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2128.356317] env[63024]: DEBUG oslo_concurrency.lockutils [req-132cadcc-4d0f-432a-9a54-f2046744ef48 req-4f4e00db-03af-47a6-88ff-3573cde55cd5 service nova] Acquiring lock "refresh_cache-94893f45-fb96-463b-82a9-e2fd884b81f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2128.356463] env[63024]: DEBUG oslo_concurrency.lockutils [req-132cadcc-4d0f-432a-9a54-f2046744ef48 req-4f4e00db-03af-47a6-88ff-3573cde55cd5 service nova] Acquired lock "refresh_cache-94893f45-fb96-463b-82a9-e2fd884b81f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2128.356623] env[63024]: DEBUG nova.network.neutron [req-132cadcc-4d0f-432a-9a54-f2046744ef48 req-4f4e00db-03af-47a6-88ff-3573cde55cd5 service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Refreshing network info cache for port 61d84146-ea46-4ba6-ab7a-7e81dec991d2 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2128.489977] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b494a232-91d2-457b-84ce-de5f1ca86bec {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.498071] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa57661a-acde-4494-870e-caa7d14052ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.529609] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88eaaa69-9e24-44e7-ad11-ffdb11b0daba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.538540] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0b1906a-2717-48ac-8fc5-163efe7c6846 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.551792] env[63024]: DEBUG nova.compute.provider_tree [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2128.605866] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951801, 'name': ReconfigVM_Task, 'duration_secs': 0.358822} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.606164] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Reconfigured VM instance instance-00000072 to attach disk [datastore1] beefd67c-b791-4c19-822b-b0e21ec5f8ac/beefd67c-b791-4c19-822b-b0e21ec5f8ac.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2128.606789] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-30c154d7-6004-42cf-bfec-702b55537b6d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2128.613462] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2128.613462] env[63024]: value = "task-1951803" [ 2128.613462] env[63024]: _type = "Task" [ 2128.613462] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2128.620750] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951803, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2128.665355] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951802, 'name': ReconfigVM_Task, 'duration_secs': 0.333904} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2128.665613] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Reconfigured VM instance instance-0000006f to attach disk [datastore1] volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910/volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2128.665866] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance 'a483e6b5-a192-4cfe-be36-1ce0667f5697' progress to 50 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2128.730861] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "3f350c3e-e9b3-4798-a424-fd32235d21cf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.731188] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "3f350c3e-e9b3-4798-a424-fd32235d21cf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.731415] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "3f350c3e-e9b3-4798-a424-fd32235d21cf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2128.731601] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "3f350c3e-e9b3-4798-a424-fd32235d21cf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2128.731797] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "3f350c3e-e9b3-4798-a424-fd32235d21cf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2128.733856] env[63024]: INFO nova.compute.manager [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Terminating 
instance [ 2128.986801] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Volume attach. Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2128.987085] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402253', 'volume_id': 'fd669326-4ffa-49a6-9107-74d2b957b5f4', 'name': 'volume-fd669326-4ffa-49a6-9107-74d2b957b5f4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5192ad93-a4e9-4aa0-983d-186ab17360f0', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd669326-4ffa-49a6-9107-74d2b957b5f4', 'serial': 'fd669326-4ffa-49a6-9107-74d2b957b5f4'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2128.987950] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fa38f1e-6082-498c-992e-bb69b92c258c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.005894] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3ffe622-0624-4a97-a8ed-fd8d6816e16b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.032268] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] volume-fd669326-4ffa-49a6-9107-74d2b957b5f4/volume-fd669326-4ffa-49a6-9107-74d2b957b5f4.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2129.034231] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e96c7c18-6e9c-4779-9509-238b2e88a82a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.052806] env[63024]: DEBUG oslo_vmware.api [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2129.052806] env[63024]: value = "task-1951804" [ 2129.052806] env[63024]: _type = "Task" [ 2129.052806] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.058014] env[63024]: DEBUG nova.scheduler.client.report [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2129.067630] env[63024]: DEBUG oslo_vmware.api [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951804, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.122804] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951803, 'name': Rename_Task, 'duration_secs': 0.497993} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.123094] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2129.123382] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-70b9abbd-2fe0-44fd-af7f-795cde5bdf67 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.131186] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2129.131186] env[63024]: value = "task-1951805" [ 2129.131186] env[63024]: _type = "Task" [ 2129.131186] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.138571] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951805, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.163210] env[63024]: DEBUG nova.network.neutron [req-132cadcc-4d0f-432a-9a54-f2046744ef48 req-4f4e00db-03af-47a6-88ff-3573cde55cd5 service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Updated VIF entry in instance network info cache for port 61d84146-ea46-4ba6-ab7a-7e81dec991d2. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2129.163582] env[63024]: DEBUG nova.network.neutron [req-132cadcc-4d0f-432a-9a54-f2046744ef48 req-4f4e00db-03af-47a6-88ff-3573cde55cd5 service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Updating instance_info_cache with network_info: [{"id": "61d84146-ea46-4ba6-ab7a-7e81dec991d2", "address": "fa:16:3e:52:e4:c0", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap61d84146-ea", "ovs_interfaceid": "61d84146-ea46-4ba6-ab7a-7e81dec991d2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2129.172274] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8be62645-1466-46f9-bf6f-1c19e506aa7f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.192475] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3877f99f-130d-42cc-94b5-09c8e0a24b03 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.214072] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance 'a483e6b5-a192-4cfe-be36-1ce0667f5697' progress to 67 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2129.239062] env[63024]: DEBUG nova.compute.manager [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2129.239062] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2129.239062] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1581fd13-0aa6-4133-b2b1-69333f607c74 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.246129] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2129.246389] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-12d1946e-e3b1-413c-8cdc-b2c29a9c4825 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.331401] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2129.331704] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2129.331913] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleting the datastore file [datastore1] 3f350c3e-e9b3-4798-a424-fd32235d21cf {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2129.332177] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b665e0b-707c-4b37-9343-7474bf13f256 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2129.338841] env[63024]: DEBUG oslo_vmware.api [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2129.338841] env[63024]: value = "task-1951807" [ 2129.338841] env[63024]: _type = "Task" [ 2129.338841] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2129.346926] env[63024]: DEBUG oslo_vmware.api [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951807, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.563859] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.738s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2129.569356] env[63024]: DEBUG oslo_vmware.api [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951804, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.590083] env[63024]: INFO nova.scheduler.client.report [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Deleted allocations for instance 34e4db8e-e0d9-4a27-9368-c5e711b51a29 [ 2129.641440] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951805, 'name': PowerOnVM_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2129.666678] env[63024]: DEBUG oslo_concurrency.lockutils [req-132cadcc-4d0f-432a-9a54-f2046744ef48 req-4f4e00db-03af-47a6-88ff-3573cde55cd5 service nova] Releasing lock "refresh_cache-94893f45-fb96-463b-82a9-e2fd884b81f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2129.849257] env[63024]: DEBUG oslo_vmware.api [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.126858} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2129.849480] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2129.849666] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2129.849848] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2129.850060] env[63024]: INFO nova.compute.manager [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Took 0.61 seconds to destroy the instance on the hypervisor. [ 2129.850286] env[63024]: DEBUG oslo.service.loopingcall [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2129.850486] env[63024]: DEBUG nova.compute.manager [-] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2129.850581] env[63024]: DEBUG nova.network.neutron [-] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2130.065189] env[63024]: DEBUG oslo_vmware.api [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951804, 'name': ReconfigVM_Task, 'duration_secs': 0.510956} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.066030] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Reconfigured VM instance instance-00000068 to attach disk [datastore1] volume-fd669326-4ffa-49a6-9107-74d2b957b5f4/volume-fd669326-4ffa-49a6-9107-74d2b957b5f4.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2130.070835] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4162417e-c536-441a-87b6-9fc5b43dcda4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.087732] env[63024]: DEBUG oslo_vmware.api [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2130.087732] env[63024]: value = "task-1951808" [ 2130.087732] env[63024]: _type = "Task" [ 2130.087732] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2130.098555] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f6156d42-8649-4974-b2ba-5b39cb6b0b31 tempest-AttachInterfacesTestJSON-1162117638 tempest-AttachInterfacesTestJSON-1162117638-project-member] Lock "34e4db8e-e0d9-4a27-9368-c5e711b51a29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.893s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2130.104456] env[63024]: DEBUG oslo_vmware.api [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951808, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2130.141564] env[63024]: DEBUG oslo_vmware.api [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951805, 'name': PowerOnVM_Task, 'duration_secs': 0.515408} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.141869] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2130.142094] env[63024]: INFO nova.compute.manager [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Took 8.78 seconds to spawn the instance on the hypervisor. 
[ 2130.142272] env[63024]: DEBUG nova.compute.manager [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2130.143045] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53abb0d-8ed0-42a6-a070-95b2b011fc2b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.405208] env[63024]: DEBUG nova.compute.manager [req-14ac272b-7f63-4cc7-b882-28510eee6625 req-3cc38bc8-e1d1-467e-87aa-9691250de186 service nova] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Received event network-vif-deleted-bd02ff9f-c94b-4697-90ee-516a789ffac6 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2130.405435] env[63024]: INFO nova.compute.manager [req-14ac272b-7f63-4cc7-b882-28510eee6625 req-3cc38bc8-e1d1-467e-87aa-9691250de186 service nova] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Neutron deleted interface bd02ff9f-c94b-4697-90ee-516a789ffac6; detaching it from the instance and deleting it from the info cache [ 2130.405611] env[63024]: DEBUG nova.network.neutron [req-14ac272b-7f63-4cc7-b882-28510eee6625 req-3cc38bc8-e1d1-467e-87aa-9691250de186 service nova] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2130.545683] env[63024]: DEBUG nova.network.neutron [-] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2130.597375] env[63024]: DEBUG oslo_vmware.api [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951808, 'name': ReconfigVM_Task, 'duration_secs': 0.166691} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2130.597696] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402253', 'volume_id': 'fd669326-4ffa-49a6-9107-74d2b957b5f4', 'name': 'volume-fd669326-4ffa-49a6-9107-74d2b957b5f4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5192ad93-a4e9-4aa0-983d-186ab17360f0', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd669326-4ffa-49a6-9107-74d2b957b5f4', 'serial': 'fd669326-4ffa-49a6-9107-74d2b957b5f4'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2130.659527] env[63024]: INFO nova.compute.manager [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Took 14.82 seconds to build instance. 
[ 2130.858167] env[63024]: DEBUG nova.network.neutron [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Port d9aaf9d8-7a03-4fe5-8494-40497e138b13 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2130.907997] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4979343-4d2f-4e6b-8ee4-bbbef0e4e33c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.917630] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c5923a-f57d-467d-8c20-6b3c63803bfd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2130.948803] env[63024]: DEBUG nova.compute.manager [req-14ac272b-7f63-4cc7-b882-28510eee6625 req-3cc38bc8-e1d1-467e-87aa-9691250de186 service nova] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Detach interface failed, port_id=bd02ff9f-c94b-4697-90ee-516a789ffac6, reason: Instance 3f350c3e-e9b3-4798-a424-fd32235d21cf could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2131.048125] env[63024]: INFO nova.compute.manager [-] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Took 1.20 seconds to deallocate network for instance. [ 2131.161802] env[63024]: DEBUG oslo_concurrency.lockutils [None req-31f4aa5a-2175-4136-9196-492e14ace66d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.331s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2131.554339] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.554769] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.555137] env[63024]: DEBUG nova.objects.instance [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lazy-loading 'resources' on Instance uuid 3f350c3e-e9b3-4798-a424-fd32235d21cf {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2131.648537] env[63024]: DEBUG nova.objects.instance [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'flavor' on Instance uuid 5192ad93-a4e9-4aa0-983d-186ab17360f0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 
2131.887899] env[63024]: DEBUG oslo_concurrency.lockutils [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "a483e6b5-a192-4cfe-be36-1ce0667f5697-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2131.888378] env[63024]: DEBUG oslo_concurrency.lockutils [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "a483e6b5-a192-4cfe-be36-1ce0667f5697-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2131.888697] env[63024]: DEBUG oslo_concurrency.lockutils [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "a483e6b5-a192-4cfe-be36-1ce0667f5697-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.154525] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7e9b434f-1c03-44e6-a4df-c8d086919675 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.773s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2132.186847] env[63024]: DEBUG oslo_concurrency.lockutils [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.187183] env[63024]: DEBUG oslo_concurrency.lockutils [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2132.187370] env[63024]: INFO nova.compute.manager [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Shelving [ 2132.218606] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd0e52d-c762-4a2a-847e-2af6c45891ad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.226562] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e56fd9d-2268-456e-8ecf-620b779b555b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.259675] 
env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6057d71-8f79-4a0c-bff5-9f184d5bd59f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.267886] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d3c825c-e62b-46a4-904d-f1903641698d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2132.284476] env[63024]: DEBUG nova.compute.provider_tree [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2132.507248] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2132.507492] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2132.788018] env[63024]: DEBUG nova.scheduler.client.report [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2132.949770] env[63024]: DEBUG oslo_concurrency.lockutils [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2132.949991] env[63024]: DEBUG oslo_concurrency.lockutils [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2132.950198] env[63024]: DEBUG nova.network.neutron [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] 
Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2133.011140] env[63024]: DEBUG nova.compute.utils [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2133.197299] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2133.198265] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-affe805d-8227-415f-bfdf-dc707f03e14b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.205646] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2133.205646] env[63024]: value = "task-1951809" [ 2133.205646] env[63024]: _type = "Task" [ 2133.205646] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.213696] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951809, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.293341] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.738s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.320983] env[63024]: INFO nova.scheduler.client.report [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted allocations for instance 3f350c3e-e9b3-4798-a424-fd32235d21cf [ 2133.514042] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2133.689701] env[63024]: DEBUG nova.network.neutron [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance_info_cache with network_info: [{"id": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "address": "fa:16:3e:37:15:c7", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": 
"192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9aaf9d8-7a", "ovs_interfaceid": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2133.715947] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951809, 'name': PowerOffVM_Task, 'duration_secs': 0.212695} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2133.716209] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2133.716988] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91fe119c-e1f4-4bf8-83c4-d5ce7451bd79 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.735431] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8170aa73-6c5d-4b31-aab3-0469e88598c5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.828736] env[63024]: DEBUG oslo_concurrency.lockutils [None req-76c64734-7823-4755-899b-6269dc5de9fd tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "3f350c3e-e9b3-4798-a424-fd32235d21cf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.097s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2134.192593] env[63024]: DEBUG oslo_concurrency.lockutils [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2134.245650] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2134.245987] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f4833d82-1edf-4c16-a820-a9d4735ad224 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.254442] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2134.254442] env[63024]: value = "task-1951810" [ 2134.254442] env[63024]: _type = "Task" [ 2134.254442] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2134.264730] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951810, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.585959] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.586287] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.586548] env[63024]: INFO nova.compute.manager [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Attaching volume a6b03d8a-56b7-4c68-a7ad-78d907b3d529 to /dev/sdc [ 2134.619406] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-117bdd4a-4025-4a85-96eb-7939427a397f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.628724] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d7920cc-4dab-4f10-a367-5ceee9ff8d06 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.642633] env[63024]: DEBUG nova.virt.block_device [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Updating existing volume attachment record: fd4029d2-2e18-44ec-a4b4-c0b25e42c64d {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2134.704243] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3ee5d1c-791e-4262-8a81-cb6e2faf2e95 {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.711803] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c111fa68-60c4-4c8b-a82a-677da07e1945 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.764577] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951810, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.829325] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.829618] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.829830] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2134.830094] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2134.830364] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2134.833091] env[63024]: INFO nova.compute.manager [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Terminating instance [ 2135.270953] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951810, 'name': CreateSnapshot_Task, 'duration_secs': 0.570626} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2135.271208] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2135.272048] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3462bb1-3bb8-41b8-94c2-4c68d907b250 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.336739] env[63024]: DEBUG nova.compute.manager [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2135.339013] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2135.339493] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb929c5-034c-44ba-91a7-a57b4560b720 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.347843] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2135.348270] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da7e5c01-d301-45e2-b4bd-ab223dbf110f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.355213] env[63024]: DEBUG oslo_vmware.api [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2135.355213] env[63024]: value = "task-1951812" [ 2135.355213] env[63024]: _type = "Task" [ 2135.355213] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.368399] env[63024]: DEBUG oslo_vmware.api [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951812, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.426152] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2135.426410] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2135.426602] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 2135.798042] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2135.798368] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-02fba717-b290-4b3c-b435-0a9d55b80a6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.808017] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2135.808017] env[63024]: value = "task-1951813" [ 2135.808017] env[63024]: _type = "Task" [ 2135.808017] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2135.816320] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951813, 'name': CloneVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2135.846496] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09783783-3c43-4b83-adc4-a882ddb0224f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.869808] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d01ef12-359f-4ee7-9556-5edfe92cc6ff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.877843] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance 'a483e6b5-a192-4cfe-be36-1ce0667f5697' progress to 83 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2135.885163] env[63024]: DEBUG oslo_vmware.api [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951812, 'name': PowerOffVM_Task, 'duration_secs': 0.197113} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2135.885163] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2135.885163] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2135.885514] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f2e7b697-636d-422a-a307-dbd1b29bd450 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2135.930508] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Skipping network cache update for instance because it is being deleted. 
{{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10307}} [ 2136.197095] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2136.197349] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2136.197546] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleting the datastore file [datastore1] df2933d1-32c3-48a6-8ceb-d5e3047d0b78 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2136.197842] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dc3de8dd-fbe6-4176-8e5c-71e62918d81e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.204172] env[63024]: DEBUG oslo_vmware.api [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for the task: (returnval){ [ 2136.204172] env[63024]: value = "task-1951815" [ 2136.204172] env[63024]: _type = "Task" [ 2136.204172] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.212673] env[63024]: DEBUG oslo_vmware.api [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951815, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.318231] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951813, 'name': CloneVM_Task} progress is 93%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.388017] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2136.388432] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2873987-b5fc-44a3-85cc-61a773e76562 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2136.395955] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2136.395955] env[63024]: value = "task-1951816" [ 2136.395955] env[63024]: _type = "Task" [ 2136.395955] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2136.403846] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951816, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.622475] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "14bafeba-9f5b-4488-b29c-38939973deb9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2136.622722] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "14bafeba-9f5b-4488-b29c-38939973deb9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2136.716741] env[63024]: DEBUG oslo_vmware.api [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Task: {'id': task-1951815, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133231} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2136.716741] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2136.716741] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2136.716741] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2136.716741] env[63024]: INFO nova.compute.manager [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Took 1.38 seconds to destroy the instance on the hypervisor. [ 2136.716741] env[63024]: DEBUG oslo.service.loopingcall [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2136.716741] env[63024]: DEBUG nova.compute.manager [-] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2136.717184] env[63024]: DEBUG nova.network.neutron [-] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2136.829201] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951813, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2136.908465] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951816, 'name': PowerOnVM_Task} progress is 100%. 
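[editor's note] The recurring "Task: {'id': task-..., 'name': ...} progress is N%" records above are emitted while oslo.vmware polls a vCenter task (PowerOffVM_Task, CloneVM_Task, PowerOnVM_Task, ...) until it reaches a terminal state. The following is a minimal, illustrative poll loop in Python; it is not the oslo.vmware implementation, and `get_task_info` and POLL_INTERVAL are assumptions made for the sketch.

    import time

    POLL_INTERVAL = 0.5  # seconds between polls (assumed value for this sketch)

    def wait_for_task(get_task_info, task_id, timeout=300.0):
        """Poll a task until it succeeds, fails, or times out.

        get_task_info is an assumed callable returning a dict such as
        {'state': 'running', 'progress': 94} or {'state': 'success'}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info(task_id)
            state = info.get('state')
            if state == 'success':
                return info  # task finished, e.g. CloneVM_Task completed
            if state == 'error':
                raise RuntimeError(f'task {task_id} failed: {info}')
            # still queued or running: report progress and poll again
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(POLL_INTERVAL)
        raise TimeoutError(f'task {task_id} did not complete in {timeout:.0f}s')

The repeated "progress is 94%" lines for task-1951813 above are simply successive iterations of a loop like this one until the final record reports duration_secs and success.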
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.019712] env[63024]: DEBUG nova.compute.manager [req-62105ba2-99c0-4ce8-bbc2-4bfdfe89c364 req-c499f6ff-7ef2-40fd-87e1-7ccc35b2ba58 service nova] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Received event network-vif-deleted-d9f698cf-c7f2-403c-92db-98c7ef61b086 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2137.019922] env[63024]: INFO nova.compute.manager [req-62105ba2-99c0-4ce8-bbc2-4bfdfe89c364 req-c499f6ff-7ef2-40fd-87e1-7ccc35b2ba58 service nova] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Neutron deleted interface d9f698cf-c7f2-403c-92db-98c7ef61b086; detaching it from the instance and deleting it from the info cache [ 2137.020372] env[63024]: DEBUG nova.network.neutron [req-62105ba2-99c0-4ce8-bbc2-4bfdfe89c364 req-c499f6ff-7ef2-40fd-87e1-7ccc35b2ba58 service nova] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2137.126782] env[63024]: DEBUG nova.compute.manager [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2137.321310] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951813, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.406319] env[63024]: DEBUG oslo_vmware.api [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951816, 'name': PowerOnVM_Task, 'duration_secs': 0.608091} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2137.406619] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2137.406806] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-532f6201-c4c5-4ac2-861d-5e067ea24d2e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance 'a483e6b5-a192-4cfe-be36-1ce0667f5697' progress to 100 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2137.489501] env[63024]: DEBUG nova.network.neutron [-] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2137.522504] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e97c19b5-6616-402b-a0bf-7008782bc86a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.532119] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed8f8331-8efe-45fd-b3da-02dbb446baef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2137.566608] env[63024]: DEBUG nova.compute.manager [req-62105ba2-99c0-4ce8-bbc2-4bfdfe89c364 req-c499f6ff-7ef2-40fd-87e1-7ccc35b2ba58 service nova] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Detach interface failed, port_id=d9f698cf-c7f2-403c-92db-98c7ef61b086, reason: Instance df2933d1-32c3-48a6-8ceb-d5e3047d0b78 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2137.650463] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2137.650738] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2137.652591] env[63024]: INFO nova.compute.claims [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2137.820331] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951813, 'name': CloneVM_Task} progress is 94%. 
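[editor's note] The "Acquiring lock ... / Lock ... acquired ... waited / ... released ... held" triplets above (for example around "compute_resources" during the resource-tracker claim, or per-instance locks during terminate) come from oslo.concurrency's lock helpers. A small usage sketch follows; the lock names are taken from the log, but the function bodies are placeholders and not Nova code.

    from oslo_concurrency import lockutils

    # Decorator form: all callers sharing the same lock name are serialized,
    # which is what produces the "waited X.XXXs" / "held X.XXXs" debug lines.
    @lockutils.synchronized('compute_resources')
    def claim_resources(instance_uuid):
        # placeholder for the real claim logic (illustrative only)
        return f'claimed {instance_uuid}'

    # Context-manager form, e.g. one lock keyed by instance UUID:
    def terminate(instance_uuid):
        with lockutils.lock(instance_uuid):
            # placeholder for the real teardown logic (illustrative only)
            return f'terminated {instance_uuid}'

The long "waited" times in the log (e.g. 1.380s on "compute_resources") indicate a caller blocking in exactly this way while another request holds the same named lock.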
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2137.993040] env[63024]: INFO nova.compute.manager [-] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Took 1.28 seconds to deallocate network for instance. [ 2138.175514] env[63024]: DEBUG oslo_concurrency.lockutils [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "cb038d54-b785-4930-b8a5-b309c5f4b58d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2138.175746] env[63024]: DEBUG oslo_concurrency.lockutils [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2138.321211] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951813, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.499255] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2138.678608] env[63024]: DEBUG nova.compute.utils [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2138.797039] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87d90c50-2eb1-4d9f-8a50-31e2b51efdc2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.804839] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93065c8f-5f01-49c0-a350-58a730b97125 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.840091] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5854c7a-1cbb-404e-a0f7-4eb3f160bcf8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.850054] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951813, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2138.851343] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fedcdd01-eed1-4cea-866c-c41734d03035 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2138.865135] env[63024]: DEBUG nova.compute.provider_tree [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2139.181774] env[63024]: DEBUG oslo_concurrency.lockutils [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2139.194374] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Volume attach. Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2139.194617] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402256', 'volume_id': 'a6b03d8a-56b7-4c68-a7ad-78d907b3d529', 'name': 'volume-a6b03d8a-56b7-4c68-a7ad-78d907b3d529', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5192ad93-a4e9-4aa0-983d-186ab17360f0', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6b03d8a-56b7-4c68-a7ad-78d907b3d529', 'serial': 'a6b03d8a-56b7-4c68-a7ad-78d907b3d529'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2139.195618] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18191e5-92d1-411d-9b6c-046d72930221 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.212653] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2fbf29e-4753-4c88-bfa3-a55407d8c48c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.240535] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] volume-a6b03d8a-56b7-4c68-a7ad-78d907b3d529/volume-a6b03d8a-56b7-4c68-a7ad-78d907b3d529.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm 
/opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2139.241147] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f76b8de-3c75-45df-ab11-c829bcd065a3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.260061] env[63024]: DEBUG oslo_vmware.api [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2139.260061] env[63024]: value = "task-1951818" [ 2139.260061] env[63024]: _type = "Task" [ 2139.260061] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2139.267766] env[63024]: DEBUG oslo_vmware.api [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951818, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.346436] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951813, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.369346] env[63024]: DEBUG nova.scheduler.client.report [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2139.771207] env[63024]: DEBUG oslo_vmware.api [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951818, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2139.847596] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951813, 'name': CloneVM_Task, 'duration_secs': 3.848307} completed successfully. 
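[editor's note] The "Inventory has not changed for provider ... based on inventory data" record above carries the resource-provider inventory the report client compares against Placement. Placement derives usable capacity per resource class roughly as (total - reserved) * allocation_ratio, with max_unit capping any single allocation. A quick worked check against the logged numbers (illustrative script, not Nova or Placement code):

    # Inventory as logged for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 170},
    }

    for rc, inv in inventory.items():
        # Placement-style usable capacity: (total - reserved) * allocation_ratio.
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(f'{rc:<9} capacity={capacity:<6d} max per allocation={inv["max_unit"]}')

    # Expected output:
    # VCPU      capacity=192    max per allocation=16
    # MEMORY_MB capacity=196078 max per allocation=65530
    # DISK_GB   capacity=400    max per allocation=170

This is why the claim for instance 14bafeba-9f5b-4488-b29c-38939973deb9 can succeed even with many instances on the node: the 4.0 VCPU allocation ratio inflates 48 physical cores to 192 schedulable vCPUs.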
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2139.847782] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Created linked-clone VM from snapshot [ 2139.848393] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3d63496-8dcd-4a05-91a1-ecfa5608a346 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.855364] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Uploading image f39727dc-b607-4e0d-ab78-4d4d9d972a30 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2139.874455] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.224s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2139.874951] env[63024]: DEBUG nova.compute.manager [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2139.880021] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.380s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.880021] env[63024]: DEBUG nova.objects.instance [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lazy-loading 'resources' on Instance uuid df2933d1-32c3-48a6-8ceb-d5e3047d0b78 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2139.885900] env[63024]: DEBUG oslo_vmware.rw_handles [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2139.885900] env[63024]: value = "vm-402257" [ 2139.885900] env[63024]: _type = "VirtualMachine" [ 2139.885900] env[63024]: }. 
{{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2139.886753] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-71c66cd2-8539-419c-8460-0c2b82172e4f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2139.894989] env[63024]: DEBUG oslo_vmware.rw_handles [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lease: (returnval){ [ 2139.894989] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dd5573-1222-0f29-74c4-1c5c473897f8" [ 2139.894989] env[63024]: _type = "HttpNfcLease" [ 2139.894989] env[63024]: } obtained for exporting VM: (result){ [ 2139.894989] env[63024]: value = "vm-402257" [ 2139.894989] env[63024]: _type = "VirtualMachine" [ 2139.894989] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2139.895275] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the lease: (returnval){ [ 2139.895275] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dd5573-1222-0f29-74c4-1c5c473897f8" [ 2139.895275] env[63024]: _type = "HttpNfcLease" [ 2139.895275] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2139.902748] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2139.902748] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dd5573-1222-0f29-74c4-1c5c473897f8" [ 2139.902748] env[63024]: _type = "HttpNfcLease" [ 2139.902748] env[63024]: } is initializing. 
{{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2139.997291] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "a483e6b5-a192-4cfe-be36-1ce0667f5697" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2139.997564] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "a483e6b5-a192-4cfe-be36-1ce0667f5697" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2139.997757] env[63024]: DEBUG nova.compute.manager [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Going to confirm migration 7 {{(pid=63024) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5113}} [ 2140.243751] env[63024]: DEBUG oslo_concurrency.lockutils [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "cb038d54-b785-4930-b8a5-b309c5f4b58d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2140.244128] env[63024]: DEBUG oslo_concurrency.lockutils [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2140.244386] env[63024]: INFO nova.compute.manager [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Attaching volume 6a11f033-eb79-45c4-ad3c-793e965649b3 to /dev/sdb [ 2140.272333] env[63024]: DEBUG oslo_vmware.api [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951818, 'name': ReconfigVM_Task, 'duration_secs': 0.696778} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.272939] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Reconfigured VM instance instance-00000068 to attach disk [datastore1] volume-a6b03d8a-56b7-4c68-a7ad-78d907b3d529/volume-a6b03d8a-56b7-4c68-a7ad-78d907b3d529.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2140.278028] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5a376439-c17b-4924-b031-aa83789df009 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.289181] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec79a21e-1259-4e78-9731-d242e0f4ee73 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.296530] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ca588d-cea0-4bf1-a270-0029c2d6387a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.300266] env[63024]: DEBUG oslo_vmware.api [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2140.300266] env[63024]: value = "task-1951820" [ 2140.300266] env[63024]: _type = "Task" [ 2140.300266] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2140.308327] env[63024]: DEBUG oslo_vmware.api [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951820, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2140.311612] env[63024]: DEBUG nova.virt.block_device [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating existing volume attachment record: 351ba3fb-e2a3-44e7-b8bc-69fcdb24a1d9 {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2140.383097] env[63024]: DEBUG nova.compute.utils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2140.387805] env[63024]: DEBUG nova.compute.manager [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2140.388192] env[63024]: DEBUG nova.network.neutron [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2140.406014] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2140.406014] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dd5573-1222-0f29-74c4-1c5c473897f8" [ 2140.406014] env[63024]: _type = "HttpNfcLease" [ 2140.406014] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2140.406601] env[63024]: DEBUG oslo_vmware.rw_handles [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2140.406601] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52dd5573-1222-0f29-74c4-1c5c473897f8" [ 2140.406601] env[63024]: _type = "HttpNfcLease" [ 2140.406601] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2140.407352] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93986618-7ebc-4091-8369-5d37ec4e2343 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.417621] env[63024]: DEBUG oslo_vmware.rw_handles [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ef557e-dc6e-879c-fcb5-f938176dcacd/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2140.417832] env[63024]: DEBUG oslo_vmware.rw_handles [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ef557e-dc6e-879c-fcb5-f938176dcacd/disk-0.vmdk for reading. 
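[editor's note] The rw_handles records above show the snapshot-upload path: an HttpNfcLease is obtained for the linked-clone VM (vm-402257), the ready lease exposes a VMDK URL on the ESX host, and the handle then reads that URL while the lease is kept alive via HttpNfcLeaseProgress. Conceptually the read side is plain HTTPS streaming; the rough sketch below uses the requests library and is not oslo.vmware's rw_handles code. CHUNK_SIZE and the on_progress callback are assumptions for the example.

    import requests

    CHUNK_SIZE = 64 * 1024  # assumed read size for this sketch

    def stream_exported_vmdk(vmdk_url, on_progress, verify_tls=True):
        """Stream a VMDK exposed by an export lease, yielding raw chunks.

        on_progress is an assumed callback (e.g. one that renews the lease);
        it receives the total number of bytes read so far.
        """
        read = 0
        with requests.get(vmdk_url, stream=True, verify=verify_tls, timeout=60) as resp:
            resp.raise_for_status()
            for chunk in resp.iter_content(chunk_size=CHUNK_SIZE):
                if not chunk:
                    continue
                read += len(chunk)
                on_progress(read)  # e.g. mapped to an HttpNfcLeaseProgress percentage
                yield chunk

    # Usage sketch: consume the generator and hand the chunks to the image service.
    # for block in stream_exported_vmdk(url, on_progress=print):
    #     upload(block)

The periodic HttpNfcLeaseProgress invocations in the log correspond to the keep-alive step hinted at by on_progress above; without them the export lease would expire mid-transfer.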
{{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2140.476911] env[63024]: DEBUG nova.policy [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d70f21af28e4c14a8f7b55090aa435f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd0c44555e30414c83750b762e243dc1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2140.525180] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-a1d45303-efe3-4ff0-b2e7-d3d9275d9382 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.558604] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2140.558757] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquired lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2140.558936] env[63024]: DEBUG nova.network.neutron [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2140.559148] env[63024]: DEBUG nova.objects.instance [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lazy-loading 'info_cache' on Instance uuid a483e6b5-a192-4cfe-be36-1ce0667f5697 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2140.628545] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48127ce4-2fba-4081-b5e7-e3c9c2c6dd47 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.636081] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b94ef3f0-949d-423d-9e61-6ee5b1b672cb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.666652] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca6aeba5-199f-4749-9db8-00c90572712c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.673723] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-5eaa3064-50b5-48ed-a4e0-0b97475bd83d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2140.686790] env[63024]: DEBUG nova.compute.provider_tree [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2140.815073] env[63024]: DEBUG oslo_vmware.api [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951820, 'name': ReconfigVM_Task, 'duration_secs': 0.200551} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2140.815073] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402256', 'volume_id': 'a6b03d8a-56b7-4c68-a7ad-78d907b3d529', 'name': 'volume-a6b03d8a-56b7-4c68-a7ad-78d907b3d529', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5192ad93-a4e9-4aa0-983d-186ab17360f0', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6b03d8a-56b7-4c68-a7ad-78d907b3d529', 'serial': 'a6b03d8a-56b7-4c68-a7ad-78d907b3d529'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2140.885028] env[63024]: DEBUG nova.network.neutron [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Successfully created port: 4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2140.888069] env[63024]: DEBUG nova.compute.manager [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2141.189762] env[63024]: DEBUG nova.scheduler.client.report [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2141.695107] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.816s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2141.719057] env[63024]: INFO nova.scheduler.client.report [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Deleted allocations for instance df2933d1-32c3-48a6-8ceb-d5e3047d0b78 [ 2141.843973] env[63024]: DEBUG nova.network.neutron [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance_info_cache with network_info: [{"id": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "address": "fa:16:3e:37:15:c7", "network": {"id": "8b46e914-e3e8-40c4-8f9d-01d1f97bf25e", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-1558746173-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6bbfeec6d47746328f185acd132e0d5a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "afae6acd-1873-4228-9d5a-1cd5d4efe3e4", "external-id": "nsx-vlan-transportzone-183", "segmentation_id": 183, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd9aaf9d8-7a", "ovs_interfaceid": "d9aaf9d8-7a03-4fe5-8494-40497e138b13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2141.859030] env[63024]: DEBUG nova.objects.instance [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'flavor' on Instance uuid 5192ad93-a4e9-4aa0-983d-186ab17360f0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2141.898756] env[63024]: DEBUG nova.compute.manager 
[None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2141.929446] env[63024]: DEBUG nova.virt.hardware [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2141.929713] env[63024]: DEBUG nova.virt.hardware [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2141.929870] env[63024]: DEBUG nova.virt.hardware [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2141.930064] env[63024]: DEBUG nova.virt.hardware [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2141.930219] env[63024]: DEBUG nova.virt.hardware [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2141.930366] env[63024]: DEBUG nova.virt.hardware [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2141.930572] env[63024]: DEBUG nova.virt.hardware [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2141.930731] env[63024]: DEBUG nova.virt.hardware [None 
req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2141.930900] env[63024]: DEBUG nova.virt.hardware [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2141.931075] env[63024]: DEBUG nova.virt.hardware [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2141.931261] env[63024]: DEBUG nova.virt.hardware [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2141.933044] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cdc053d-898f-4291-a1af-0594f2ff1760 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2141.941915] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d049b2e6-c0cd-46e9-87ed-15ee695968a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.227340] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bbc2dd8d-83b8-4f28-8c1b-b56c59619f29 tempest-ServersTestJSON-2007614233 tempest-ServersTestJSON-2007614233-project-member] Lock "df2933d1-32c3-48a6-8ceb-d5e3047d0b78" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.398s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.308535] env[63024]: DEBUG nova.compute.manager [req-f5f273aa-5912-4e5c-9254-88b8d1c3b567 req-cb7703a7-809f-47e8-a3e0-d83cf15dff77 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Received event network-vif-plugged-4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2142.308535] env[63024]: DEBUG oslo_concurrency.lockutils [req-f5f273aa-5912-4e5c-9254-88b8d1c3b567 req-cb7703a7-809f-47e8-a3e0-d83cf15dff77 service nova] Acquiring lock "14bafeba-9f5b-4488-b29c-38939973deb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2142.308535] env[63024]: DEBUG oslo_concurrency.lockutils [req-f5f273aa-5912-4e5c-9254-88b8d1c3b567 req-cb7703a7-809f-47e8-a3e0-d83cf15dff77 service nova] Lock "14bafeba-9f5b-4488-b29c-38939973deb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2142.308535] env[63024]: DEBUG oslo_concurrency.lockutils 
[req-f5f273aa-5912-4e5c-9254-88b8d1c3b567 req-cb7703a7-809f-47e8-a3e0-d83cf15dff77 service nova] Lock "14bafeba-9f5b-4488-b29c-38939973deb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.308535] env[63024]: DEBUG nova.compute.manager [req-f5f273aa-5912-4e5c-9254-88b8d1c3b567 req-cb7703a7-809f-47e8-a3e0-d83cf15dff77 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] No waiting events found dispatching network-vif-plugged-4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2142.308535] env[63024]: WARNING nova.compute.manager [req-f5f273aa-5912-4e5c-9254-88b8d1c3b567 req-cb7703a7-809f-47e8-a3e0-d83cf15dff77 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Received unexpected event network-vif-plugged-4ba24717-2947-46f0-9df8-733d8b40c345 for instance with vm_state building and task_state spawning. [ 2142.346383] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Releasing lock "refresh_cache-a483e6b5-a192-4cfe-be36-1ce0667f5697" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2142.348209] env[63024]: DEBUG nova.objects.instance [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lazy-loading 'migration_context' on Instance uuid a483e6b5-a192-4cfe-be36-1ce0667f5697 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2142.368596] env[63024]: DEBUG oslo_concurrency.lockutils [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2142.368596] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9e4a8401-38c5-419e-94d0-b4c609a96ff2 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.781s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2142.369064] env[63024]: DEBUG oslo_concurrency.lockutils [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.003s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2142.400556] env[63024]: DEBUG nova.network.neutron [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Successfully updated port: 4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 
2142.850730] env[63024]: DEBUG nova.objects.base [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2142.851719] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b696730-9b27-49ee-8bf7-c69c84acebf1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.873554] env[63024]: INFO nova.compute.manager [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Detaching volume fd669326-4ffa-49a6-9107-74d2b957b5f4 [ 2142.876490] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d07d0ba5-addb-4ce6-ab89-ae0401612055 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.883931] env[63024]: DEBUG oslo_vmware.api [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2142.883931] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528d60e4-973f-8180-8897-14d333f6ec3a" [ 2142.883931] env[63024]: _type = "Task" [ 2142.883931] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2142.892403] env[63024]: DEBUG oslo_vmware.api [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528d60e4-973f-8180-8897-14d333f6ec3a, 'name': SearchDatastore_Task, 'duration_secs': 0.00683} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2142.892715] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2142.892974] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2142.902213] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2142.902325] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2142.902434] env[63024]: DEBUG nova.network.neutron [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2142.911206] env[63024]: INFO nova.virt.block_device [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Attempting to driver detach volume fd669326-4ffa-49a6-9107-74d2b957b5f4 from mountpoint /dev/sdb [ 2142.911426] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2142.911668] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402253', 'volume_id': 'fd669326-4ffa-49a6-9107-74d2b957b5f4', 'name': 'volume-fd669326-4ffa-49a6-9107-74d2b957b5f4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5192ad93-a4e9-4aa0-983d-186ab17360f0', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd669326-4ffa-49a6-9107-74d2b957b5f4', 'serial': 'fd669326-4ffa-49a6-9107-74d2b957b5f4'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2142.912425] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cc79621-de1b-4058-91ba-67cc8a85032b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.936760] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26081f94-0fce-40db-830e-b7702eb40271 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.944556] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5136dfaf-22fe-4833-a749-67b4cf620f0c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.969422] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca3e13f1-1f17-466d-98a8-6ca5afa42272 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2142.985771] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] The volume has not been displaced from its original location: [datastore1] volume-fd669326-4ffa-49a6-9107-74d2b957b5f4/volume-fd669326-4ffa-49a6-9107-74d2b957b5f4.vmdk. No consolidation needed. 
{{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2142.989985] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2142.990326] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f94c1527-4a17-4229-8467-0994011088fe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.011878] env[63024]: DEBUG oslo_vmware.api [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2143.011878] env[63024]: value = "task-1951825" [ 2143.011878] env[63024]: _type = "Task" [ 2143.011878] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.019764] env[63024]: DEBUG oslo_vmware.api [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951825, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.450979] env[63024]: DEBUG nova.network.neutron [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2143.507490] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 2143.507891] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2143.508077] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2143.508239] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2143.508372] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 2143.508535] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2143.508656] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Cleaning up deleted instances {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11558}} [ 2143.521434] env[63024]: DEBUG oslo_vmware.api [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951825, 'name': ReconfigVM_Task, 'duration_secs': 0.394219} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2143.523895] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2143.528796] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7acfe32c-58dc-45ab-8d13-e37a0ab44011 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.543170] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9654e3a-c66a-4685-8722-6d384f6984af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.547141] env[63024]: DEBUG oslo_vmware.api [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2143.547141] env[63024]: value = "task-1951826" [ 2143.547141] env[63024]: _type = "Task" [ 2143.547141] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2143.557582] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3962d9-35a7-4d21-a58d-1e7c0e1b728f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.565410] env[63024]: DEBUG oslo_vmware.api [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951826, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2143.596195] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5dd071e-59a2-4c43-933b-bdf673e81127 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.604493] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e48ddb3-125b-462c-9a8e-343528c7b5d9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2143.619326] env[63024]: DEBUG nova.compute.provider_tree [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2143.721994] env[63024]: DEBUG nova.network.neutron [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating instance_info_cache with network_info: [{"id": "4ba24717-2947-46f0-9df8-733d8b40c345", "address": "fa:16:3e:3b:41:f5", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba24717-29", "ovs_interfaceid": "4ba24717-2947-46f0-9df8-733d8b40c345", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2144.026729] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] There are 63 instances to clean {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11567}} [ 2144.027044] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 3f350c3e-e9b3-4798-a424-fd32235d21cf] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2144.057988] env[63024]: DEBUG oslo_vmware.api [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951826, 'name': ReconfigVM_Task, 'duration_secs': 0.16946} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.058324] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402253', 'volume_id': 'fd669326-4ffa-49a6-9107-74d2b957b5f4', 'name': 'volume-fd669326-4ffa-49a6-9107-74d2b957b5f4', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5192ad93-a4e9-4aa0-983d-186ab17360f0', 'attached_at': '', 'detached_at': '', 'volume_id': 'fd669326-4ffa-49a6-9107-74d2b957b5f4', 'serial': 'fd669326-4ffa-49a6-9107-74d2b957b5f4'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2144.122940] env[63024]: DEBUG nova.scheduler.client.report [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2144.223129] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2144.223493] env[63024]: DEBUG nova.compute.manager [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Instance network_info: |[{"id": "4ba24717-2947-46f0-9df8-733d8b40c345", "address": "fa:16:3e:3b:41:f5", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba24717-29", "ovs_interfaceid": "4ba24717-2947-46f0-9df8-733d8b40c345", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2144.223939] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:41:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55c757ac-f8b2-466d-b634-07dbd100b312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ba24717-2947-46f0-9df8-733d8b40c345', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2144.231409] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating folder: Project (dd0c44555e30414c83750b762e243dc1). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2144.231765] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b6316a5d-65fa-4f6c-bf8f-c588ab458853 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.244407] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Created folder: Project (dd0c44555e30414c83750b762e243dc1) in parent group-v401959. [ 2144.244626] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating folder: Instances. Parent ref: group-v402260. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2144.244892] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f65a62a-77b3-4298-8134-115eb6db4c84 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.255054] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Created folder: Instances in parent group-v402260. [ 2144.255146] env[63024]: DEBUG oslo.service.loopingcall [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2144.255312] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2144.255526] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9ddeec2-a494-40e9-b35e-54d3a6da6e33 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.275617] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2144.275617] env[63024]: value = "task-1951829" [ 2144.275617] env[63024]: _type = "Task" [ 2144.275617] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.289702] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951829, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2144.341062] env[63024]: DEBUG nova.compute.manager [req-84d6416e-c630-4876-b61a-fd09dc92ae69 req-815166f8-2d2f-4cc5-b8a1-b30f4fa50c82 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Received event network-changed-4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2144.341284] env[63024]: DEBUG nova.compute.manager [req-84d6416e-c630-4876-b61a-fd09dc92ae69 req-815166f8-2d2f-4cc5-b8a1-b30f4fa50c82 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Refreshing instance network info cache due to event network-changed-4ba24717-2947-46f0-9df8-733d8b40c345. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2144.341493] env[63024]: DEBUG oslo_concurrency.lockutils [req-84d6416e-c630-4876-b61a-fd09dc92ae69 req-815166f8-2d2f-4cc5-b8a1-b30f4fa50c82 service nova] Acquiring lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2144.341572] env[63024]: DEBUG oslo_concurrency.lockutils [req-84d6416e-c630-4876-b61a-fd09dc92ae69 req-815166f8-2d2f-4cc5-b8a1-b30f4fa50c82 service nova] Acquired lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2144.342357] env[63024]: DEBUG nova.network.neutron [req-84d6416e-c630-4876-b61a-fd09dc92ae69 req-815166f8-2d2f-4cc5-b8a1-b30f4fa50c82 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Refreshing network info cache for port 4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2144.530272] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: e4d6e79b-f110-44c2-8201-926b57eeb68d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2144.606958] env[63024]: DEBUG nova.objects.instance [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'flavor' on Instance uuid 5192ad93-a4e9-4aa0-983d-186ab17360f0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2144.786539] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951829, 'name': CreateVM_Task, 'duration_secs': 0.363218} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2144.786723] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2144.787443] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2144.787613] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2144.787948] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2144.788232] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d280ec00-d3b1-41c2-862e-a2b4c5815def {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2144.793011] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2144.793011] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52aa2cf3-da55-ce4e-d519-d6b3d8148f0b" [ 2144.793011] env[63024]: _type = "Task" [ 2144.793011] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2144.800891] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52aa2cf3-da55-ce4e-d519-d6b3d8148f0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.033812] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 1666cff0-59bd-41a0-aa3c-d1e8fac3a49a] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2145.090667] env[63024]: DEBUG nova.network.neutron [req-84d6416e-c630-4876-b61a-fd09dc92ae69 req-815166f8-2d2f-4cc5-b8a1-b30f4fa50c82 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updated VIF entry in instance network info cache for port 4ba24717-2947-46f0-9df8-733d8b40c345. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2145.091060] env[63024]: DEBUG nova.network.neutron [req-84d6416e-c630-4876-b61a-fd09dc92ae69 req-815166f8-2d2f-4cc5-b8a1-b30f4fa50c82 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating instance_info_cache with network_info: [{"id": "4ba24717-2947-46f0-9df8-733d8b40c345", "address": "fa:16:3e:3b:41:f5", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba24717-29", "ovs_interfaceid": "4ba24717-2947-46f0-9df8-733d8b40c345", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2145.135024] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.242s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2145.304130] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52aa2cf3-da55-ce4e-d519-d6b3d8148f0b, 'name': SearchDatastore_Task, 'duration_secs': 0.010153} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2145.304458] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2145.304694] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2145.304936] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2145.305110] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2145.305312] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2145.305583] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9e723e6c-5161-4e83-88ae-a60f9f716def {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.315220] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2145.315315] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2145.316180] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2bf20bf-b9bf-490d-a8b7-2717acdcbd0a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.323209] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2145.323209] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52717b08-5938-59c1-a252-91bb9c069c0e" [ 2145.323209] env[63024]: _type = "Task" [ 2145.323209] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.330096] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52717b08-5938-59c1-a252-91bb9c069c0e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.358863] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Volume attach. Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2145.359151] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402259', 'volume_id': '6a11f033-eb79-45c4-ad3c-793e965649b3', 'name': 'volume-6a11f033-eb79-45c4-ad3c-793e965649b3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cb038d54-b785-4930-b8a5-b309c5f4b58d', 'attached_at': '', 'detached_at': '', 'volume_id': '6a11f033-eb79-45c4-ad3c-793e965649b3', 'serial': '6a11f033-eb79-45c4-ad3c-793e965649b3'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2145.360048] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd6c9a7e-8ff7-4953-8ab3-4800a853525e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.377534] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35a59195-c94c-43dd-a11c-5eb8a99201e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.401265] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 
volume-6a11f033-eb79-45c4-ad3c-793e965649b3/volume-6a11f033-eb79-45c4-ad3c-793e965649b3.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2145.401601] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cff255c-582d-405e-8671-da6fadbd526d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.420835] env[63024]: DEBUG oslo_vmware.api [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2145.420835] env[63024]: value = "task-1951830" [ 2145.420835] env[63024]: _type = "Task" [ 2145.420835] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.431624] env[63024]: DEBUG oslo_vmware.api [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951830, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.451433] env[63024]: INFO nova.compute.manager [None req-2dee9808-46e4-4fb1-acd2-28afb8bef576 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Get console output [ 2145.451939] env[63024]: WARNING nova.virt.vmwareapi.driver [None req-2dee9808-46e4-4fb1-acd2-28afb8bef576 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] The console log is missing. 
Check your VSPC configuration [ 2145.537655] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 2aa624cb-b36a-43c9-8407-37383f196563] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2145.595215] env[63024]: DEBUG oslo_concurrency.lockutils [req-84d6416e-c630-4876-b61a-fd09dc92ae69 req-815166f8-2d2f-4cc5-b8a1-b30f4fa50c82 service nova] Releasing lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2145.618476] env[63024]: DEBUG oslo_concurrency.lockutils [None req-16490d5e-29ce-4ea5-9b24-1a3217d8cf8d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.249s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2145.661330] env[63024]: DEBUG oslo_concurrency.lockutils [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2145.661578] env[63024]: DEBUG oslo_concurrency.lockutils [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2145.696895] env[63024]: INFO nova.scheduler.client.report [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted allocation for migration f50108e4-2aa2-4aaa-b88b-5fb59917d283 [ 2145.834074] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52717b08-5938-59c1-a252-91bb9c069c0e, 'name': SearchDatastore_Task, 'duration_secs': 0.008969} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2145.834487] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf053fba-f5b0-45b2-91c9-1b866ce18f38 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2145.839911] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2145.839911] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f29e10-b149-cb67-e575-033852d85444" [ 2145.839911] env[63024]: _type = "Task" [ 2145.839911] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2145.847853] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f29e10-b149-cb67-e575-033852d85444, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2145.931190] env[63024]: DEBUG oslo_vmware.api [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951830, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.042224] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 881b1f35-206e-4c3f-bf7a-d1774a9343c2] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2146.164657] env[63024]: INFO nova.compute.manager [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Detaching volume a6b03d8a-56b7-4c68-a7ad-78d907b3d529 [ 2146.204151] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cbea5144-fad8-41d3-8f1d-b9736f3bff3e tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "a483e6b5-a192-4cfe-be36-1ce0667f5697" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.206s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2146.315917] env[63024]: INFO nova.virt.block_device [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Attempting to driver detach volume a6b03d8a-56b7-4c68-a7ad-78d907b3d529 from mountpoint /dev/sdc [ 2146.316323] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2146.316599] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402256', 'volume_id': 'a6b03d8a-56b7-4c68-a7ad-78d907b3d529', 'name': 'volume-a6b03d8a-56b7-4c68-a7ad-78d907b3d529', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5192ad93-a4e9-4aa0-983d-186ab17360f0', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6b03d8a-56b7-4c68-a7ad-78d907b3d529', 'serial': 'a6b03d8a-56b7-4c68-a7ad-78d907b3d529'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2146.317692] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc48443-d24c-4813-b3f9-5eabb38b8fda {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.348883] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d777fdf6-384b-49cf-be96-2c3fcb9936b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.359026] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52f29e10-b149-cb67-e575-033852d85444, 'name': SearchDatastore_Task, 'duration_secs': 0.009855} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.361252] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2146.361627] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 14bafeba-9f5b-4488-b29c-38939973deb9/14bafeba-9f5b-4488-b29c-38939973deb9.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2146.362132] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-387cfe09-be66-4a75-82a5-65d714c4c09e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.365085] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0225754f-46ed-4798-9812-b35c966bb2b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.389942] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b4b9967-d4db-46f4-97aa-d93365944b5b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.392752] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2146.392752] env[63024]: value = "task-1951831" [ 2146.392752] env[63024]: _type = "Task" [ 2146.392752] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.406575] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] The volume has not been displaced from its original location: [datastore1] volume-a6b03d8a-56b7-4c68-a7ad-78d907b3d529/volume-a6b03d8a-56b7-4c68-a7ad-78d907b3d529.vmdk. No consolidation needed. 
{{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2146.412502] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Reconfiguring VM instance instance-00000068 to detach disk 2002 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2146.413315] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12dabe8c-759a-4ae3-b80f-89b71f59474a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.429633] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951831, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.443245] env[63024]: DEBUG oslo_vmware.api [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951830, 'name': ReconfigVM_Task, 'duration_secs': 0.721065} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.443245] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfigured VM instance instance-0000006d to attach disk [datastore1] volume-6a11f033-eb79-45c4-ad3c-793e965649b3/volume-6a11f033-eb79-45c4-ad3c-793e965649b3.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2146.446661] env[63024]: DEBUG oslo_vmware.api [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2146.446661] env[63024]: value = "task-1951832" [ 2146.446661] env[63024]: _type = "Task" [ 2146.446661] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.446661] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02d4efe4-fa69-43c8-ba6c-76235d82d060 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.465388] env[63024]: DEBUG oslo_vmware.api [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951832, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.466726] env[63024]: DEBUG oslo_vmware.api [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2146.466726] env[63024]: value = "task-1951833" [ 2146.466726] env[63024]: _type = "Task" [ 2146.466726] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.474836] env[63024]: DEBUG oslo_vmware.api [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951833, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.546236] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: c5541241-84e2-4216-b6f9-4c716f29d759] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2146.903141] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951831, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.474195} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.903417] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 14bafeba-9f5b-4488-b29c-38939973deb9/14bafeba-9f5b-4488-b29c-38939973deb9.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2146.903633] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2146.903892] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-93fd3c1d-061d-4a6e-acd3-c8eb0f59f609 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2146.911512] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2146.911512] env[63024]: value = "task-1951834" [ 2146.911512] env[63024]: _type = "Task" [ 2146.911512] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2146.920297] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951834, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2146.968963] env[63024]: DEBUG oslo_vmware.api [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951832, 'name': ReconfigVM_Task, 'duration_secs': 0.260342} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2146.977163] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Reconfigured VM instance instance-00000068 to detach disk 2002 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2146.981886] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eb3d5926-fc2b-4db1-a12b-8f67e56fd2d4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.003672] env[63024]: DEBUG oslo_vmware.api [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951833, 'name': ReconfigVM_Task, 'duration_secs': 0.153224} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.006588] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402259', 'volume_id': '6a11f033-eb79-45c4-ad3c-793e965649b3', 'name': 'volume-6a11f033-eb79-45c4-ad3c-793e965649b3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'cb038d54-b785-4930-b8a5-b309c5f4b58d', 'attached_at': '', 'detached_at': '', 'volume_id': '6a11f033-eb79-45c4-ad3c-793e965649b3', 'serial': '6a11f033-eb79-45c4-ad3c-793e965649b3'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2147.008176] env[63024]: DEBUG oslo_vmware.api [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2147.008176] env[63024]: value = "task-1951835" [ 2147.008176] env[63024]: _type = "Task" [ 2147.008176] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.020292] env[63024]: DEBUG oslo_vmware.api [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951835, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.049863] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 8826c266-659c-46ad-bb02-aefdffab8699] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2147.422574] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951834, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066015} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.422837] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2147.423689] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f555302e-75fa-47e5-b6a3-a066f41f3f7f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.446515] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 14bafeba-9f5b-4488-b29c-38939973deb9/14bafeba-9f5b-4488-b29c-38939973deb9.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2147.446861] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2617aa7b-fd83-4378-b07e-ab1c85b309e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.467772] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2147.467772] env[63024]: value = "task-1951836" [ 2147.467772] env[63024]: _type = "Task" [ 2147.467772] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.490194] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951836, 'name': ReconfigVM_Task} progress is 10%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2147.536136] env[63024]: DEBUG oslo_vmware.api [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951835, 'name': ReconfigVM_Task, 'duration_secs': 0.152641} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.536499] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402256', 'volume_id': 'a6b03d8a-56b7-4c68-a7ad-78d907b3d529', 'name': 'volume-a6b03d8a-56b7-4c68-a7ad-78d907b3d529', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5192ad93-a4e9-4aa0-983d-186ab17360f0', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6b03d8a-56b7-4c68-a7ad-78d907b3d529', 'serial': 'a6b03d8a-56b7-4c68-a7ad-78d907b3d529'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2147.554346] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: df5a197c-8e35-44a0-8b9c-63dae50b77ff] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2147.978164] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951836, 'name': ReconfigVM_Task, 'duration_secs': 0.302673} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2147.978460] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 14bafeba-9f5b-4488-b29c-38939973deb9/14bafeba-9f5b-4488-b29c-38939973deb9.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2147.979110] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ef323bc3-b3c4-4505-ae4e-d6d4cbd034ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2147.986034] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2147.986034] env[63024]: value = "task-1951837" [ 2147.986034] env[63024]: _type = "Task" [ 2147.986034] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2147.993973] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951837, 'name': Rename_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.060324] env[63024]: DEBUG nova.objects.instance [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'flavor' on Instance uuid cb038d54-b785-4930-b8a5-b309c5f4b58d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2148.062100] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 3cf2726c-2551-4bbd-8032-006062cdcc39] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2148.085243] env[63024]: DEBUG nova.objects.instance [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'flavor' on Instance uuid 5192ad93-a4e9-4aa0-983d-186ab17360f0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2148.496022] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951837, 'name': Rename_Task, 'duration_secs': 0.153425} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2148.496338] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2148.496596] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5608019c-0cab-4a47-920e-f399cc85a484 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.503085] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2148.503085] env[63024]: value = "task-1951838" [ 2148.503085] env[63024]: _type = "Task" [ 2148.503085] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.510544] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951838, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2148.567429] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 85f31573-5535-4712-b736-747c43ed74b3] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2148.569678] env[63024]: DEBUG oslo_concurrency.lockutils [None req-464bb1c0-803f-40df-b959-986e51181611 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.326s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2148.621714] env[63024]: DEBUG oslo_vmware.rw_handles [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ef557e-dc6e-879c-fcb5-f938176dcacd/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2148.622704] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3730c16a-dc8b-4bec-af3b-a42ae8dd9cde {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.629636] env[63024]: DEBUG oslo_vmware.rw_handles [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ef557e-dc6e-879c-fcb5-f938176dcacd/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2148.629807] env[63024]: ERROR oslo_vmware.rw_handles [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ef557e-dc6e-879c-fcb5-f938176dcacd/disk-0.vmdk due to incomplete transfer. [ 2148.630052] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f14969a2-fb8f-4745-a0a6-5d72f4b865c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.637493] env[63024]: DEBUG oslo_vmware.rw_handles [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ef557e-dc6e-879c-fcb5-f938176dcacd/disk-0.vmdk. 
{{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2148.637696] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Uploaded image f39727dc-b607-4e0d-ab78-4d4d9d972a30 to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2148.639955] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2148.640217] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ccbbf5c3-4d8a-4bcc-a8ab-33bb74509086 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2148.648053] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2148.648053] env[63024]: value = "task-1951839" [ 2148.648053] env[63024]: _type = "Task" [ 2148.648053] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2148.658390] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951839, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.012720] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951838, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.071526] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 34e4db8e-e0d9-4a27-9368-c5e711b51a29] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2149.099067] env[63024]: DEBUG oslo_concurrency.lockutils [None req-50b716ed-d3d7-4f02-aba3-77221f245e6d tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.437s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2149.157841] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951839, 'name': Destroy_Task, 'duration_secs': 0.380175} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.159301] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Destroyed the VM [ 2149.159301] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2149.159301] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2a4b052c-b65a-4a51-81b0-e3a8b0593f31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.165475] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2149.165475] env[63024]: value = "task-1951840" [ 2149.165475] env[63024]: _type = "Task" [ 2149.165475] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2149.173349] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951840, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2149.424173] env[63024]: DEBUG nova.compute.manager [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Stashing vm_state: active {{(pid=63024) _prep_resize /opt/stack/nova/nova/compute/manager.py:5954}} [ 2149.514351] env[63024]: DEBUG oslo_vmware.api [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951838, 'name': PowerOnVM_Task, 'duration_secs': 0.727189} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.514686] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2149.514906] env[63024]: INFO nova.compute.manager [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Took 7.62 seconds to spawn the instance on the hypervisor. 
[ 2149.515110] env[63024]: DEBUG nova.compute.manager [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2149.516023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f2a22a8-e1ac-4ab3-b6ba-ebb6eb341a24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.574686] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 50c72c53-ff72-42e6-afdc-14e0ac64f490] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2149.675713] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951840, 'name': RemoveSnapshot_Task, 'duration_secs': 0.377983} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2149.677027] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2149.677027] env[63024]: DEBUG nova.compute.manager [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2149.677279] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68144dfc-7a1c-4842-9eb8-70ea8e5fe870 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2149.947048] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2149.947048] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2150.034925] env[63024]: INFO nova.compute.manager [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Took 12.40 seconds to build instance. 
[ 2150.073996] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2150.074435] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2150.074487] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "5192ad93-a4e9-4aa0-983d-186ab17360f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2150.074672] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2150.074840] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2150.076635] env[63024]: INFO nova.compute.manager [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Terminating instance [ 2150.077934] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: a694e49c-37c5-483f-b1d8-5426f6a52b73] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2150.188842] env[63024]: INFO nova.compute.manager [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Shelve offloading [ 2150.306267] env[63024]: DEBUG nova.compute.manager [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Stashing vm_state: active {{(pid=63024) _prep_resize /opt/stack/nova/nova/compute/manager.py:5954}} [ 2150.452106] env[63024]: INFO nova.compute.claims [None req-38f6ea69-4072-4607-be91-4e0812e0796e 
tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2150.536856] env[63024]: DEBUG oslo_concurrency.lockutils [None req-60614ad6-03a9-4ebb-a432-f2c59f7e91d6 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "14bafeba-9f5b-4488-b29c-38939973deb9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.914s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2150.581298] env[63024]: DEBUG nova.compute.manager [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2150.581604] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2150.582100] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: da1f5cbc-47bf-4ee4-837a-b328de170489] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2150.584746] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fac02dbd-860a-4e0d-aec6-3a5dc29fb4a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.593643] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2150.593926] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1fe22e7a-53fe-41a5-b1bd-0fb4beef7cf6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.600999] env[63024]: DEBUG oslo_vmware.api [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2150.600999] env[63024]: value = "task-1951841" [ 2150.600999] env[63024]: _type = "Task" [ 2150.600999] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.609295] env[63024]: DEBUG oslo_vmware.api [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951841, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.692457] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2150.692764] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b21c5986-a177-4372-a55a-b4718bace374 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2150.700404] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2150.700404] env[63024]: value = "task-1951842" [ 2150.700404] env[63024]: _type = "Task" [ 2150.700404] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2150.707877] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951842, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2150.827801] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2150.957816] env[63024]: INFO nova.compute.resource_tracker [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating resource usage from migration 6b493773-408a-454d-801f-b4476cfe41cb [ 2151.054217] env[63024]: DEBUG nova.compute.manager [req-7b845d2f-3df8-4366-a8cf-56292bf054af req-00116a8c-2198-455b-81c2-020140257e21 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Received event network-changed-4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2151.054217] env[63024]: DEBUG nova.compute.manager [req-7b845d2f-3df8-4366-a8cf-56292bf054af req-00116a8c-2198-455b-81c2-020140257e21 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Refreshing instance network info cache due to event network-changed-4ba24717-2947-46f0-9df8-733d8b40c345. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2151.054217] env[63024]: DEBUG oslo_concurrency.lockutils [req-7b845d2f-3df8-4366-a8cf-56292bf054af req-00116a8c-2198-455b-81c2-020140257e21 service nova] Acquiring lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2151.054217] env[63024]: DEBUG oslo_concurrency.lockutils [req-7b845d2f-3df8-4366-a8cf-56292bf054af req-00116a8c-2198-455b-81c2-020140257e21 service nova] Acquired lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2151.054217] env[63024]: DEBUG nova.network.neutron [req-7b845d2f-3df8-4366-a8cf-56292bf054af req-00116a8c-2198-455b-81c2-020140257e21 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Refreshing network info cache for port 4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2151.089355] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: ec1f30e6-8410-4687-958f-f4e6e154b52f] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2151.100185] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1619d47a-1451-4f3c-ab15-d8129bd99b72 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.114911] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b68651-db41-4e98-8d30-b578452152b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.118035] env[63024]: DEBUG oslo_vmware.api [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951841, 'name': PowerOffVM_Task, 'duration_secs': 0.193385} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.118300] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2151.118498] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2151.118978] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ac49c33-4509-4190-b205-1f4f723637aa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.145231] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a480ef4-577e-4941-b401-ab0b00cb632a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.152155] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d51afa54-b1a0-4611-bd1e-9f6410cc9e71 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.165171] env[63024]: DEBUG nova.compute.provider_tree [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2151.191148] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2151.191376] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2151.191558] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Deleting the datastore file [datastore1] 5192ad93-a4e9-4aa0-983d-186ab17360f0 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2151.191802] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba3f2a48-bc79-472e-8bd7-fa862e6f77da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.199309] env[63024]: DEBUG oslo_vmware.api [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 
tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for the task: (returnval){ [ 2151.199309] env[63024]: value = "task-1951844" [ 2151.199309] env[63024]: _type = "Task" [ 2151.199309] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2151.210132] env[63024]: DEBUG oslo_vmware.api [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951844, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2151.213746] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2151.213948] env[63024]: DEBUG nova.compute.manager [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2151.214748] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10606d5d-7b7f-4934-a00a-5210cb620282 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2151.220015] env[63024]: DEBUG oslo_concurrency.lockutils [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "refresh_cache-beefd67c-b791-4c19-822b-b0e21ec5f8ac" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2151.220228] env[63024]: DEBUG oslo_concurrency.lockutils [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "refresh_cache-beefd67c-b791-4c19-822b-b0e21ec5f8ac" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2151.220430] env[63024]: DEBUG nova.network.neutron [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2151.592448] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: e9784dce-9a3f-4969-b48c-9c5b17959d88] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2151.668940] env[63024]: DEBUG nova.scheduler.client.report [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2151.714636] env[63024]: DEBUG oslo_vmware.api [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Task: {'id': task-1951844, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138251} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2151.715013] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2151.715306] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2151.715580] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2151.715837] env[63024]: INFO nova.compute.manager [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Took 1.13 seconds to destroy the instance on the hypervisor. [ 2151.716280] env[63024]: DEBUG oslo.service.loopingcall [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2151.716463] env[63024]: DEBUG nova.compute.manager [-] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2151.716599] env[63024]: DEBUG nova.network.neutron [-] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2152.095638] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 51bdfe4a-2439-4ad5-97f3-f60c70c87b9d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2152.136938] env[63024]: DEBUG nova.network.neutron [req-7b845d2f-3df8-4366-a8cf-56292bf054af req-00116a8c-2198-455b-81c2-020140257e21 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updated VIF entry in instance network info cache for port 4ba24717-2947-46f0-9df8-733d8b40c345. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2152.137313] env[63024]: DEBUG nova.network.neutron [req-7b845d2f-3df8-4366-a8cf-56292bf054af req-00116a8c-2198-455b-81c2-020140257e21 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating instance_info_cache with network_info: [{"id": "4ba24717-2947-46f0-9df8-733d8b40c345", "address": "fa:16:3e:3b:41:f5", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba24717-29", "ovs_interfaceid": "4ba24717-2947-46f0-9df8-733d8b40c345", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2152.175689] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.229s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2152.175896] env[63024]: INFO nova.compute.manager [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Migrating [ 2152.182128] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.354s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2152.258486] env[63024]: DEBUG nova.network.neutron [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Updating instance_info_cache with network_info: [{"id": "5b820f6b-5e1e-4bf8-a434-55cbc9e7968d", "address": "fa:16:3e:6d:fd:6a", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5b820f6b-5e", "ovs_interfaceid": "5b820f6b-5e1e-4bf8-a434-55cbc9e7968d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2152.473513] env[63024]: DEBUG nova.compute.manager [req-18bd4bcc-9744-4f5e-8139-9c9fd648895a req-e9b81b77-d01e-460b-aca3-095d29f26c09 service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Received event network-vif-deleted-38ea7e67-f8df-4441-93e8-2983babd9f62 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2152.473647] env[63024]: INFO nova.compute.manager [req-18bd4bcc-9744-4f5e-8139-9c9fd648895a req-e9b81b77-d01e-460b-aca3-095d29f26c09 service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Neutron deleted interface 38ea7e67-f8df-4441-93e8-2983babd9f62; detaching it from the instance and deleting it from the info cache [ 2152.473739] env[63024]: DEBUG nova.network.neutron [req-18bd4bcc-9744-4f5e-8139-9c9fd648895a req-e9b81b77-d01e-460b-aca3-095d29f26c09 service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2152.599218] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 6c277ff8-ec25-4fd7-9dea-0efea9a0de29] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2152.640284] env[63024]: DEBUG oslo_concurrency.lockutils [req-7b845d2f-3df8-4366-a8cf-56292bf054af req-00116a8c-2198-455b-81c2-020140257e21 service nova] Releasing lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2152.692410] env[63024]: INFO nova.compute.claims [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2152.696079] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2152.696244] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2152.696422] env[63024]: 
DEBUG nova.network.neutron [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2152.761309] env[63024]: DEBUG oslo_concurrency.lockutils [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "refresh_cache-beefd67c-b791-4c19-822b-b0e21ec5f8ac" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2152.950701] env[63024]: DEBUG nova.network.neutron [-] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2152.979244] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-678075e2-5df2-4ec4-9cc0-f4b4b5b4fbcf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2152.989097] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e71354-ce33-424c-8750-264e446c6467 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.020604] env[63024]: DEBUG nova.compute.manager [req-18bd4bcc-9744-4f5e-8139-9c9fd648895a req-e9b81b77-d01e-460b-aca3-095d29f26c09 service nova] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Detach interface failed, port_id=38ea7e67-f8df-4441-93e8-2983babd9f62, reason: Instance 5192ad93-a4e9-4aa0-983d-186ab17360f0 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2153.035515] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2153.036343] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85d4089d-0c69-4641-ac31-1dee6ee0b6b7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.043041] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2153.043299] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f000e952-1f44-4fbd-b677-54be463885f3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.102102] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 6d21976b-f519-4c87-a0d2-0a406060608d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2153.202436] env[63024]: INFO nova.compute.resource_tracker [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating resource usage from migration ff148bac-fa34-4401-a31b-fdad842b3d7a [ 2153.353518] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25a7ee12-6ebc-4b66-9098-6e23f368ff73 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.361072] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592fdfe4-b76a-4dd4-8949-284b80189184 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.397288] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0541e827-adfa-4235-9f24-0383dc741eb6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.404447] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bea435d-0b8d-4f68-86e0-a2c39ecfdefc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.417391] env[63024]: DEBUG nova.compute.provider_tree [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2153.453578] env[63024]: INFO nova.compute.manager [-] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Took 1.74 seconds to deallocate network 
for instance. [ 2153.462961] env[63024]: DEBUG nova.network.neutron [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance_info_cache with network_info: [{"id": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "address": "fa:16:3e:a9:0a:72", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5649c3a-8b", "ovs_interfaceid": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2153.605624] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: c71abfaa-dc65-4d1b-8a34-dff9dd682fe7] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2153.921041] env[63024]: DEBUG nova.scheduler.client.report [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2153.959990] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2153.966664] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2154.110517] env[63024]: DEBUG nova.compute.manager [None 
req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 12e63b42-5554-44d5-86eb-d592bc0b2ad6] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2154.426053] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.243s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2154.426053] env[63024]: INFO nova.compute.manager [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Migrating [ 2154.432220] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.472s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2154.432447] env[63024]: DEBUG nova.objects.instance [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lazy-loading 'resources' on Instance uuid 5192ad93-a4e9-4aa0-983d-186ab17360f0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2154.498222] env[63024]: DEBUG nova.compute.manager [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Received event network-vif-unplugged-5b820f6b-5e1e-4bf8-a434-55cbc9e7968d {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2154.498465] env[63024]: DEBUG oslo_concurrency.lockutils [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] Acquiring lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2154.498708] env[63024]: DEBUG oslo_concurrency.lockutils [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] Lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2154.498884] env[63024]: DEBUG oslo_concurrency.lockutils [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] Lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2154.499071] env[63024]: DEBUG nova.compute.manager [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] No waiting events found dispatching network-vif-unplugged-5b820f6b-5e1e-4bf8-a434-55cbc9e7968d 
{{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2154.499243] env[63024]: WARNING nova.compute.manager [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Received unexpected event network-vif-unplugged-5b820f6b-5e1e-4bf8-a434-55cbc9e7968d for instance with vm_state shelved and task_state shelving_offloading. [ 2154.499405] env[63024]: DEBUG nova.compute.manager [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Received event network-changed-5b820f6b-5e1e-4bf8-a434-55cbc9e7968d {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2154.499558] env[63024]: DEBUG nova.compute.manager [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Refreshing instance network info cache due to event network-changed-5b820f6b-5e1e-4bf8-a434-55cbc9e7968d. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2154.499740] env[63024]: DEBUG oslo_concurrency.lockutils [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] Acquiring lock "refresh_cache-beefd67c-b791-4c19-822b-b0e21ec5f8ac" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2154.499913] env[63024]: DEBUG oslo_concurrency.lockutils [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] Acquired lock "refresh_cache-beefd67c-b791-4c19-822b-b0e21ec5f8ac" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2154.500091] env[63024]: DEBUG nova.network.neutron [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Refreshing network info cache for port 5b820f6b-5e1e-4bf8-a434-55cbc9e7968d {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2154.613248] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9e32eb32-6eff-4875-b4a3-adfab4647023] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2154.942875] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2154.943019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2154.943078] env[63024]: DEBUG nova.network.neutron [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Building network info cache for instance {{(pid=63024) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 2155.077476] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35bad4af-5ac4-4e67-ae17-a62b4c8aa5a8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.085311] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e24efae0-e79d-4aa8-897e-fb19955ef07f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.115956] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: ea24d375-ba88-42ca-a07e-52000ec613c0] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2155.121044] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d8f1b93-72ee-47b2-9fea-9a2855e44cb5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.128998] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c137ab2f-8fdb-44cb-8960-42d0c036dd13 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.142372] env[63024]: DEBUG nova.compute.provider_tree [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2155.252999] env[63024]: DEBUG nova.network.neutron [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Updated VIF entry in instance network info cache for port 5b820f6b-5e1e-4bf8-a434-55cbc9e7968d. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2155.253457] env[63024]: DEBUG nova.network.neutron [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Updating instance_info_cache with network_info: [{"id": "5b820f6b-5e1e-4bf8-a434-55cbc9e7968d", "address": "fa:16:3e:6d:fd:6a", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": null, "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap5b820f6b-5e", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2155.481147] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04bf1af0-562a-452c-864a-12e3cb17e4be {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2155.506305] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance 'cb038d54-b785-4930-b8a5-b309c5f4b58d' progress to 0 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2155.624807] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 96afa44e-d8c6-419c-ae69-04b7b306c2c5] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2155.648180] env[63024]: DEBUG nova.scheduler.client.report [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2155.704218] env[63024]: DEBUG nova.network.neutron [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance_info_cache with network_info: [{"id": "a13f8ab3-d900-447f-8772-5be6b3d48296", "address": "fa:16:3e:47:7f:73", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", 
"label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa13f8ab3-d9", "ovs_interfaceid": "a13f8ab3-d900-447f-8772-5be6b3d48296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2155.756016] env[63024]: DEBUG oslo_concurrency.lockutils [req-fbbcbb18-3509-405b-965c-b5b62bbd2ad7 req-7faa1f35-0c1f-4ba7-816b-8c13c02333c5 service nova] Releasing lock "refresh_cache-beefd67c-b791-4c19-822b-b0e21ec5f8ac" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2156.013369] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2156.013661] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5d859799-da46-4c7a-8338-14c1880de16c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2156.021107] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2156.021107] env[63024]: value = "task-1951846" [ 2156.021107] env[63024]: _type = "Task" [ 2156.021107] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2156.030121] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951846, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2156.129106] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 669c45b0-34d6-45f8-a30e-b9b96cfd71ef] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2156.153702] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2156.173249] env[63024]: INFO nova.scheduler.client.report [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Deleted allocations for instance 5192ad93-a4e9-4aa0-983d-186ab17360f0 [ 2156.206573] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2156.531253] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951846, 'name': PowerOffVM_Task, 'duration_secs': 0.364668} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2156.531593] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2156.531819] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance 'cb038d54-b785-4930-b8a5-b309c5f4b58d' progress to 17 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2156.632481] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 6e0aa58b-85e0-4e74-812f-cc01041ed6d3] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2156.681430] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e60fb9e1-00b6-40fc-a6c9-267be9d83128 tempest-AttachVolumeTestJSON-1100667373 tempest-AttachVolumeTestJSON-1100667373-project-member] Lock "5192ad93-a4e9-4aa0-983d-186ab17360f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.607s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2157.039079] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 
tempest-ServerActionsTestOtherB-772439459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2157.039079] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2157.039079] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2157.039079] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2157.039576] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2157.039948] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2157.040324] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2157.040635] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2157.041980] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2157.041980] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e 
tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2157.041980] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2157.046303] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bfce174-91f7-4b99-9007-f75093346381 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.061906] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2157.061906] env[63024]: value = "task-1951848" [ 2157.061906] env[63024]: _type = "Task" [ 2157.061906] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2157.070330] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951848, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2157.136339] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9e8e7b6e-1bb2-4e66-b734-2f56e31302af] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2157.572517] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951848, 'name': ReconfigVM_Task, 'duration_secs': 0.298431} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2157.572836] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance 'cb038d54-b785-4930-b8a5-b309c5f4b58d' progress to 33 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2157.642494] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: a0a9ea07-dda8-45b4-bab9-cdaf683c0a21] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2157.720175] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25d675d4-f247-4466-b217-d177949ba1f4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2157.739154] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance 'fa326fe2-c00e-4379-954a-9b3275328abc' progress to 0 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2158.079932] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2158.080214] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2158.080445] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2158.080654] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2158.080805] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image pref 0:0:0 {{(pid=63024) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2158.080955] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2158.081186] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2158.081348] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2158.081515] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2158.081676] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2158.081850] env[63024]: DEBUG nova.virt.hardware [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2158.087602] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfiguring VM instance instance-0000006d to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2158.087900] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6ecf4c56-2e7f-47d2-a94d-83a0073b2c7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.106377] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2158.106377] env[63024]: value = "task-1951849" [ 2158.106377] env[63024]: _type = "Task" [ 2158.106377] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.115666] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951849, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.145446] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 43cdc362-588f-42cc-a4b2-a08fe60293a5] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2158.245315] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2158.245640] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ac52dc33-9b7d-414a-b291-38f34c9b3fff {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.252788] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2158.252788] env[63024]: value = "task-1951850" [ 2158.252788] env[63024]: _type = "Task" [ 2158.252788] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.261227] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951850, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.544077] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2158.544299] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2158.544489] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleting the datastore file [datastore1] beefd67c-b791-4c19-822b-b0e21ec5f8ac {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2158.544768] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dd384a40-75a3-4295-a29a-bfdfffb57234 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.550993] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2158.550993] env[63024]: value = "task-1951851" [ 2158.550993] env[63024]: _type = "Task" [ 2158.550993] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.558148] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951851, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.615168] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951849, 'name': ReconfigVM_Task, 'duration_secs': 0.192257} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2158.615464] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfigured VM instance instance-0000006d to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2158.616276] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3c3e1c-103a-491b-82cc-7d7130d945f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.640983] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] cb038d54-b785-4930-b8a5-b309c5f4b58d/cb038d54-b785-4930-b8a5-b309c5f4b58d.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2158.641243] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b8fe816-06ad-4ba5-a695-e306f93da94e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2158.653832] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 56d220f3-b97c-4cbe-b582-c4a4f1171472] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2158.661172] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2158.661172] env[63024]: value = "task-1951852" [ 2158.661172] env[63024]: _type = "Task" [ 2158.661172] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2158.668795] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951852, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2158.764178] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951850, 'name': PowerOffVM_Task, 'duration_secs': 0.180996} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2158.764531] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2158.765086] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance 'fa326fe2-c00e-4379-954a-9b3275328abc' progress to 17 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2159.061126] env[63024]: DEBUG oslo_vmware.api [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951851, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.141845} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.061428] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2159.061661] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2159.061791] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2159.082235] env[63024]: INFO nova.scheduler.client.report [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted allocations for instance beefd67c-b791-4c19-822b-b0e21ec5f8ac [ 2159.156540] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: c28e7c21-7e7d-4cda-81e8-63538bd8a1f7] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2159.170392] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951852, 'name': ReconfigVM_Task, 'duration_secs': 0.290802} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.170624] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfigured VM instance instance-0000006d to attach disk [datastore1] cb038d54-b785-4930-b8a5-b309c5f4b58d/cb038d54-b785-4930-b8a5-b309c5f4b58d.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2159.170899] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance 'cb038d54-b785-4930-b8a5-b309c5f4b58d' progress to 50 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2159.272322] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2159.272604] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2159.272758] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2159.272962] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2159.273131] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2159.273283] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2159.273502] env[63024]: 
DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2159.273657] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2159.273823] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2159.273989] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2159.274189] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2159.279597] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9966abc-6966-4606-9168-45c34a16ea3f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.295460] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2159.295460] env[63024]: value = "task-1951853" [ 2159.295460] env[63024]: _type = "Task" [ 2159.295460] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2159.307400] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951853, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2159.586686] env[63024]: DEBUG oslo_concurrency.lockutils [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2159.587028] env[63024]: DEBUG oslo_concurrency.lockutils [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2159.587275] env[63024]: DEBUG nova.objects.instance [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lazy-loading 'resources' on Instance uuid beefd67c-b791-4c19-822b-b0e21ec5f8ac {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2159.660762] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9edbda30-2e28-4961-a6ad-5ab34c40ed44] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2159.677536] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f69992a2-e581-4742-8b91-d0e34bcc2c0b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.700716] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b516a428-ed22-4484-b521-413d8877f2db {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2159.721137] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance 'cb038d54-b785-4930-b8a5-b309c5f4b58d' progress to 67 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2159.805710] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951853, 'name': ReconfigVM_Task, 'duration_secs': 0.190106} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2159.806047] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance 'fa326fe2-c00e-4379-954a-9b3275328abc' progress to 33 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2160.089733] env[63024]: DEBUG nova.objects.instance [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lazy-loading 'numa_topology' on Instance uuid beefd67c-b791-4c19-822b-b0e21ec5f8ac {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2160.163976] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9ca6342c-55bd-4c78-9fa6-3caf4ec744bc] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2160.312731] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2160.312990] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2160.313166] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2160.313509] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2160.313669] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2160.313820] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2160.314036] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2160.314199] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2160.314444] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2160.314629] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2160.314808] env[63024]: DEBUG nova.virt.hardware [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2160.320158] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2160.320804] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9d2ac7d8-efb3-40db-b954-89e32d10b7c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.340532] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2160.340532] env[63024]: value = "task-1951855" [ 2160.340532] env[63024]: _type = "Task" [ 2160.340532] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.351189] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951855, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2160.592276] env[63024]: DEBUG nova.objects.base [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2160.646486] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2160.666287] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 0f371c69-c7ae-4649-b038-be82e8ca74e1] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2160.705831] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be00762a-3b3d-4abc-9081-6188bf1078e2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.713670] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d403815e-cf25-469d-92ee-41d12859b45d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.743280] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50def54b-110b-4522-8a4c-7e3577c58a33 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.750060] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d928007c-ad49-4265-ade9-758efbcc2d6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.762936] env[63024]: DEBUG nova.compute.provider_tree [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2160.850993] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951855, 'name': ReconfigVM_Task, 'duration_secs': 0.162128} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2160.851298] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2160.852054] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b0c73ff-867a-43b3-8f84-147fb1578240 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.874010] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] fa326fe2-c00e-4379-954a-9b3275328abc/fa326fe2-c00e-4379-954a-9b3275328abc.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2160.874290] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-580f7143-6924-47b7-a26c-db51f25f5897 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2160.892193] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2160.892193] env[63024]: value = "task-1951856" [ 2160.892193] env[63024]: _type = "Task" [ 2160.892193] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2160.902215] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951856, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2161.170195] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9a7f4452-ae50-4779-8474-11d3a6d3533f] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2161.266667] env[63024]: DEBUG nova.scheduler.client.report [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2161.402316] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951856, 'name': ReconfigVM_Task, 'duration_secs': 0.232319} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2161.402619] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Reconfigured VM instance instance-0000006e to attach disk [datastore1] fa326fe2-c00e-4379-954a-9b3275328abc/fa326fe2-c00e-4379-954a-9b3275328abc.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2161.402884] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance 'fa326fe2-c00e-4379-954a-9b3275328abc' progress to 50 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2161.406851] env[63024]: DEBUG nova.network.neutron [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Port f5649c3a-8bab-4abb-a1a2-1d88b780eba2 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2161.673452] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 01b8072a-4483-4932-8294-7e5b48e6b203] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2161.771131] env[63024]: DEBUG oslo_concurrency.lockutils [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.184s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2161.914466] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba8ad728-4b11-413e-b60e-8c8d2ea841d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.935520] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11582e38-9f51-4626-ba79-6272c037423c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2161.952753] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance 'fa326fe2-c00e-4379-954a-9b3275328abc' progress to 67 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2162.176977] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 77c27741-ee3a-4a8b-bbd3-89759288f7c6] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2162.278671] env[63024]: DEBUG oslo_concurrency.lockutils [None req-59acf0b5-44bb-453b-9ca3-80a04dd0896e tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 30.091s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.279632] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.633s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.279863] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2162.280188] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.280390] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.281987] env[63024]: INFO nova.compute.manager [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Terminating instance [ 2162.432333] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "cb038d54-b785-4930-b8a5-b309c5f4b58d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2162.432587] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2162.432768] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2162.541585] env[63024]: DEBUG nova.network.neutron [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Port a13f8ab3-d900-447f-8772-5be6b3d48296 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2162.680539] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: e1be531c-e849-42ac-8319-5bd453a7a562] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2162.785666] env[63024]: DEBUG nova.compute.manager [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2162.785825] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2162.786234] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3116c999-5faf-44a8-b011-bb4b06e72705 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.795816] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bed0ef24-3793-4599-8f37-918a395a439f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2162.823927] env[63024]: WARNING nova.virt.vmwareapi.vmops [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance beefd67c-b791-4c19-822b-b0e21ec5f8ac could not be found. [ 2162.824146] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2162.824352] env[63024]: INFO nova.compute.manager [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2162.824754] env[63024]: DEBUG oslo.service.loopingcall [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2162.824949] env[63024]: DEBUG nova.compute.manager [-] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2162.824949] env[63024]: DEBUG nova.network.neutron [-] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2163.183599] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 52c17abc-78f0-417b-8675-e8d62bc8baa3] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2163.477907] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2163.478252] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2163.478550] env[63024]: DEBUG nova.network.neutron [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2163.566038] env[63024]: DEBUG nova.network.neutron [-] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2163.573682] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "fa326fe2-c00e-4379-954a-9b3275328abc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2163.573682] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "fa326fe2-c00e-4379-954a-9b3275328abc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2163.573682] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "fa326fe2-c00e-4379-954a-9b3275328abc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2163.686875] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: e0a37f54-14ca-4eea-a9b3-6e652ca1e48d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2164.068908] env[63024]: INFO nova.compute.manager [-] [instance: beefd67c-b791-4c19-822b-b0e21ec5f8ac] Took 1.24 seconds to deallocate network for instance. [ 2164.190364] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 73db94b8-cfa8-4457-bccb-d4b780edbd93] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2164.240545] env[63024]: DEBUG nova.network.neutron [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance_info_cache with network_info: [{"id": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "address": "fa:16:3e:a9:0a:72", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5649c3a-8b", "ovs_interfaceid": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2164.616225] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2164.616420] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2164.616596] env[63024]: DEBUG nova.network.neutron [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Building network info cache for instance {{(pid=63024) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 2164.693403] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 1448c924-7c61-4c43-a4e7-5a6dd45375cc] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2164.743703] env[63024]: DEBUG oslo_concurrency.lockutils [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2164.755189] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "94893f45-fb96-463b-82a9-e2fd884b81f8" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2164.755395] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.106432] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4f81a476-0cd9-4de0-ae4d-e518e1aab1a4 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "beefd67c-b791-4c19-822b-b0e21ec5f8ac" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.827s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.197613] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: fe6847e2-a742-4338-983f-698c13aaefde] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2165.251975] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-655f2d47-32aa-4fe4-9a59-5845e80168d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.260345] env[63024]: DEBUG nova.compute.utils [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2165.265025] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cab37aa5-3913-4c63-8675-cc8be11b16a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2165.387821] env[63024]: DEBUG nova.network.neutron [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance_info_cache with network_info: [{"id": 
"a13f8ab3-d900-447f-8772-5be6b3d48296", "address": "fa:16:3e:47:7f:73", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa13f8ab3-d9", "ovs_interfaceid": "a13f8ab3-d900-447f-8772-5be6b3d48296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2165.686461] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "4c39a074-d380-46a3-b1cc-81d72034b743" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2165.686461] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "4c39a074-d380-46a3-b1cc-81d72034b743" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2165.701053] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9bf1316e-f1ae-426e-a0a2-d814a2460c4d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2165.764885] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2165.890310] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2166.187681] env[63024]: DEBUG nova.compute.manager [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 
tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2166.284387] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: c12774e4-77d1-4001-8d5d-0240dfed4ead] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2166.423867] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28c1e21b-29f2-4be4-ab49-f9186593467b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.446929] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8124a69e-f986-4ef8-9bf2-5b03846e5b49 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.449994] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3366b062-c9a9-4cd2-9597-6a670ea5478f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.456638] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance 'cb038d54-b785-4930-b8a5-b309c5f4b58d' progress to 83 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2166.476796] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648e3d16-4869-424a-8a5f-1d6fdcbd14c7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.483467] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance 'fa326fe2-c00e-4379-954a-9b3275328abc' progress to 83 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2166.707514] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 92d1f96e-bbe7-4654-9d3a-47ba40057157] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2166.712697] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.712946] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2166.714387] 
env[63024]: INFO nova.compute.claims [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2166.827169] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "94893f45-fb96-463b-82a9-e2fd884b81f8" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2166.827446] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2166.827684] env[63024]: INFO nova.compute.manager [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Attaching volume d5ec318c-cfd2-4b12-bd71-32945e70712e to /dev/sdb [ 2166.861918] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-933f756e-1d7c-4cdc-b952-103c6e22b1bc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.869820] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb517524-1397-4de9-a556-c2d0bac2d37c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.883728] env[63024]: DEBUG nova.virt.block_device [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Updating existing volume attachment record: defdab61-07a1-4486-a079-137238bc935e {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2166.979087] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2166.979424] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d729f0ae-498e-46aa-af03-a2bae1cc3ac1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2166.992970] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2166.992970] env[63024]: DEBUG oslo_vmware.api [None 
req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2166.992970] env[63024]: value = "task-1951858" [ 2166.992970] env[63024]: _type = "Task" [ 2166.992970] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2166.992970] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-944047bd-06af-4113-8ad1-b1e883310670 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.006315] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951858, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.007833] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2167.007833] env[63024]: value = "task-1951859" [ 2167.007833] env[63024]: _type = "Task" [ 2167.007833] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2167.017032] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951859, 'name': PowerOnVM_Task} progress is 33%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2167.210124] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 7cf0ac90-d87d-4644-8a88-da5328d1721d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2167.506355] env[63024]: DEBUG oslo_vmware.api [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951858, 'name': PowerOnVM_Task, 'duration_secs': 0.451974} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.506688] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2167.506833] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-38f6ea69-4072-4607-be91-4e0812e0796e tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance 'cb038d54-b785-4930-b8a5-b309c5f4b58d' progress to 100 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2167.519379] env[63024]: DEBUG oslo_vmware.api [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951859, 'name': PowerOnVM_Task, 'duration_secs': 0.40128} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2167.519696] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2167.519933] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1ec7ee8c-ec9b-475c-aaa6-f9d52548c69d tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance 'fa326fe2-c00e-4379-954a-9b3275328abc' progress to 100 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2167.713802] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: ac60546a-37b2-4d2a-8505-61fe202e2ed0] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2167.858217] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8978cc7e-92c2-4ee7-9514-c8777cbf7b91 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.865657] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ebd1875-c87d-4f91-ab43-ab70bd288fb1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.896467] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1607c16c-9981-4fcd-a284-f9ec82f3c861 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.904321] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a74a855-6312-4151-9c82-748bf92a9cfc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2167.918165] env[63024]: DEBUG nova.compute.provider_tree [None req-6d55a938-1020-435e-a177-834d82345904 
tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2168.217813] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b286fb9d-c14a-4aa3-bdc8-ad6c1a9263b4] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2168.421780] env[63024]: DEBUG nova.scheduler.client.report [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2168.721162] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 1709d916-d0c4-4706-b41b-8b0ed25f3331] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2168.926716] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.214s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2168.927268] env[63024]: DEBUG nova.compute.manager [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2169.224285] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9267e5e4-732d-47f1-8a30-d926a1269fb9] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2169.432237] env[63024]: DEBUG nova.compute.utils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2169.433281] env[63024]: DEBUG nova.compute.manager [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2169.433469] env[63024]: DEBUG nova.network.neutron [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2169.495865] env[63024]: DEBUG nova.policy [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7c32db2d81e40c492c1362d8356a03c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93098ad83ae144bf90a12c97ec863c06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2169.727603] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 839776ef-0562-424d-b301-2aa896f32e14] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2169.937028] env[63024]: DEBUG nova.compute.manager [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2170.015957] env[63024]: DEBUG nova.network.neutron [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Port a13f8ab3-d900-447f-8772-5be6b3d48296 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2170.016240] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2170.016393] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2170.016560] env[63024]: DEBUG nova.network.neutron [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2170.086970] env[63024]: DEBUG nova.network.neutron [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] 
[instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Successfully created port: 94e31c56-bc4b-4f1b-9a4b-4311f832022a {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2170.231485] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 601a003d-811c-4698-b0b6-054482d32c21] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2170.734849] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 31a693b6-293a-4f01-9baf-a9e7e8d453d4] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2170.846094] env[63024]: DEBUG nova.network.neutron [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Port f5649c3a-8bab-4abb-a1a2-1d88b780eba2 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2170.846386] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2170.846546] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2170.846720] env[63024]: DEBUG nova.network.neutron [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2170.892917] env[63024]: DEBUG nova.network.neutron [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance_info_cache with network_info: [{"id": "a13f8ab3-d900-447f-8772-5be6b3d48296", "address": "fa:16:3e:47:7f:73", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": 
"nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa13f8ab3-d9", "ovs_interfaceid": "a13f8ab3-d900-447f-8772-5be6b3d48296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2170.946901] env[63024]: DEBUG nova.compute.manager [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2170.974264] env[63024]: DEBUG nova.virt.hardware [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2170.974688] env[63024]: DEBUG nova.virt.hardware [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2170.974974] env[63024]: DEBUG nova.virt.hardware [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2170.975215] env[63024]: DEBUG nova.virt.hardware [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2170.975373] env[63024]: DEBUG nova.virt.hardware [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2170.975526] env[63024]: DEBUG nova.virt.hardware [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2170.975738] env[63024]: DEBUG nova.virt.hardware [None req-6d55a938-1020-435e-a177-834d82345904 
tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2170.975895] env[63024]: DEBUG nova.virt.hardware [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2170.976080] env[63024]: DEBUG nova.virt.hardware [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2170.976247] env[63024]: DEBUG nova.virt.hardware [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2170.976421] env[63024]: DEBUG nova.virt.hardware [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2170.977292] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34e55f8a-cf25-45a9-be24-3dbb1503919d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2170.985447] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90b4b658-27fb-4ec0-9519-7a1e0907636b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.238178] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b588ea21-dea0-4ee6-8f9e-12007d0a1ce1] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2171.395315] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2171.428242] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Volume attach. 
Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2171.428488] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402263', 'volume_id': 'd5ec318c-cfd2-4b12-bd71-32945e70712e', 'name': 'volume-d5ec318c-cfd2-4b12-bd71-32945e70712e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94893f45-fb96-463b-82a9-e2fd884b81f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'd5ec318c-cfd2-4b12-bd71-32945e70712e', 'serial': 'd5ec318c-cfd2-4b12-bd71-32945e70712e'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2171.429394] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e815d6c-70d2-4118-a20b-88f34285be58 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.446928] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3409a0-1d5a-4972-9054-70131c23d1af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.474100] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-d5ec318c-cfd2-4b12-bd71-32945e70712e/volume-d5ec318c-cfd2-4b12-bd71-32945e70712e.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2171.474404] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a95967a-f2c0-4884-801c-5fa9dd484961 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2171.493712] env[63024]: DEBUG oslo_vmware.api [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2171.493712] env[63024]: value = "task-1951862" [ 2171.493712] env[63024]: _type = "Task" [ 2171.493712] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2171.501439] env[63024]: DEBUG oslo_vmware.api [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951862, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2171.653832] env[63024]: DEBUG nova.compute.manager [req-250e8a23-339f-4587-a08c-80ebdae7aff2 req-286c39c1-757a-4858-835e-7964d18edf80 service nova] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Received event network-vif-plugged-94e31c56-bc4b-4f1b-9a4b-4311f832022a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2171.654104] env[63024]: DEBUG oslo_concurrency.lockutils [req-250e8a23-339f-4587-a08c-80ebdae7aff2 req-286c39c1-757a-4858-835e-7964d18edf80 service nova] Acquiring lock "4c39a074-d380-46a3-b1cc-81d72034b743-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2171.654292] env[63024]: DEBUG oslo_concurrency.lockutils [req-250e8a23-339f-4587-a08c-80ebdae7aff2 req-286c39c1-757a-4858-835e-7964d18edf80 service nova] Lock "4c39a074-d380-46a3-b1cc-81d72034b743-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2171.654528] env[63024]: DEBUG oslo_concurrency.lockutils [req-250e8a23-339f-4587-a08c-80ebdae7aff2 req-286c39c1-757a-4858-835e-7964d18edf80 service nova] Lock "4c39a074-d380-46a3-b1cc-81d72034b743-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2171.654714] env[63024]: DEBUG nova.compute.manager [req-250e8a23-339f-4587-a08c-80ebdae7aff2 req-286c39c1-757a-4858-835e-7964d18edf80 service nova] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] No waiting events found dispatching network-vif-plugged-94e31c56-bc4b-4f1b-9a4b-4311f832022a {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2171.654886] env[63024]: WARNING nova.compute.manager [req-250e8a23-339f-4587-a08c-80ebdae7aff2 req-286c39c1-757a-4858-835e-7964d18edf80 service nova] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Received unexpected event network-vif-plugged-94e31c56-bc4b-4f1b-9a4b-4311f832022a for instance with vm_state building and task_state spawning. 
[ 2171.699358] env[63024]: DEBUG nova.network.neutron [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance_info_cache with network_info: [{"id": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "address": "fa:16:3e:a9:0a:72", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5649c3a-8b", "ovs_interfaceid": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2171.742093] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 82b7019c-5049-4b8b-abb4-46f326ce3d5b] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2171.837107] env[63024]: DEBUG nova.network.neutron [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Successfully updated port: 94e31c56-bc4b-4f1b-9a4b-4311f832022a {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2171.898792] env[63024]: DEBUG nova.compute.manager [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63024) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 2171.899040] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2171.899293] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2172.004530] env[63024]: DEBUG oslo_vmware.api [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951862, 'name': ReconfigVM_Task, 'duration_secs': 0.364293} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.004879] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-d5ec318c-cfd2-4b12-bd71-32945e70712e/volume-d5ec318c-cfd2-4b12-bd71-32945e70712e.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2172.009814] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7da6b53-2f34-44f7-8b02-b47830540059 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2172.028307] env[63024]: DEBUG oslo_vmware.api [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2172.028307] env[63024]: value = "task-1951863" [ 2172.028307] env[63024]: _type = "Task" [ 2172.028307] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2172.039747] env[63024]: DEBUG oslo_vmware.api [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951863, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2172.201983] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2172.245723] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: c1fd4146-6dd3-49e9-a744-466e6168e158] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2172.339773] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "refresh_cache-4c39a074-d380-46a3-b1cc-81d72034b743" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2172.339869] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "refresh_cache-4c39a074-d380-46a3-b1cc-81d72034b743" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2172.340173] env[63024]: DEBUG nova.network.neutron [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2172.404993] env[63024]: DEBUG nova.objects.instance [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lazy-loading 'migration_context' on Instance uuid fa326fe2-c00e-4379-954a-9b3275328abc {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2172.523883] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "a483e6b5-a192-4cfe-be36-1ce0667f5697" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2172.524839] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "a483e6b5-a192-4cfe-be36-1ce0667f5697" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2172.524839] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "a483e6b5-a192-4cfe-be36-1ce0667f5697-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2172.524839] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "a483e6b5-a192-4cfe-be36-1ce0667f5697-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2172.524839] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "a483e6b5-a192-4cfe-be36-1ce0667f5697-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2172.526795] env[63024]: INFO nova.compute.manager [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Terminating instance [ 2172.538575] env[63024]: DEBUG oslo_vmware.api [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951863, 'name': ReconfigVM_Task, 'duration_secs': 0.126325} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2172.538831] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402263', 'volume_id': 'd5ec318c-cfd2-4b12-bd71-32945e70712e', 'name': 'volume-d5ec318c-cfd2-4b12-bd71-32945e70712e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94893f45-fb96-463b-82a9-e2fd884b81f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'd5ec318c-cfd2-4b12-bd71-32945e70712e', 'serial': 'd5ec318c-cfd2-4b12-bd71-32945e70712e'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2172.705998] env[63024]: DEBUG nova.compute.manager [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=63024) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 2172.748323] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 37792b57-3347-4134-a060-53359afa3298] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2172.871537] env[63024]: DEBUG nova.network.neutron [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] 
Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2173.031959] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b416148a-0b05-434b-a571-892d9dfbdb30 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.036672] env[63024]: DEBUG nova.compute.manager [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2173.036916] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2173.037446] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f50297c4-bf19-4228-bcd7-d3fdd7c18330 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.046699] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d113a53c-f6b0-447c-88a5-0f96a468c02a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.050158] env[63024]: DEBUG oslo_vmware.api [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2173.050158] env[63024]: value = "task-1951864" [ 2173.050158] env[63024]: _type = "Task" [ 2173.050158] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.050979] env[63024]: DEBUG nova.network.neutron [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Updating instance_info_cache with network_info: [{"id": "94e31c56-bc4b-4f1b-9a4b-4311f832022a", "address": "fa:16:3e:f4:50:6f", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94e31c56-bc", "ovs_interfaceid": "94e31c56-bc4b-4f1b-9a4b-4311f832022a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2173.084839] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-428fa5de-19a3-4a89-ae6c-d8696bbacd01 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.091360] env[63024]: DEBUG oslo_vmware.api [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951864, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.097014] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5d390e-b93d-4df1-a5cf-8f3f43633689 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.110867] env[63024]: DEBUG nova.compute.provider_tree [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2173.252187] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: df2933d1-32c3-48a6-8ceb-d5e3047d0b78] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2173.561814] env[63024]: DEBUG oslo_vmware.api [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951864, 'name': PowerOffVM_Task, 'duration_secs': 0.245114} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2173.562120] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2173.562327] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Volume detach. Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2173.562541] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402248', 'volume_id': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'name': 'volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'a483e6b5-a192-4cfe-be36-1ce0667f5697', 'attached_at': '2024-12-22T11:16:23.000000', 'detached_at': '', 'volume_id': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'serial': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2173.563350] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-668de541-8a9d-4ef0-b38e-cd6438d43a0b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.583053] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "refresh_cache-4c39a074-d380-46a3-b1cc-81d72034b743" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2173.583053] env[63024]: DEBUG nova.compute.manager [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Instance network_info: |[{"id": "94e31c56-bc4b-4f1b-9a4b-4311f832022a", "address": "fa:16:3e:f4:50:6f", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap94e31c56-bc", "ovs_interfaceid": "94e31c56-bc4b-4f1b-9a4b-4311f832022a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2173.583259] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:50:6f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e66c4ebe-f808-4b34-bdb5-6c45edb1736f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '94e31c56-bc4b-4f1b-9a4b-4311f832022a', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2173.590448] env[63024]: DEBUG oslo.service.loopingcall [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2173.591123] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c145beb8-f8f5-48fc-aa34-3f773e0c5493 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.593786] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2173.594574] env[63024]: DEBUG nova.objects.instance [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lazy-loading 'flavor' on Instance uuid 94893f45-fb96-463b-82a9-e2fd884b81f8 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2173.595785] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0b5ebd2-f4ca-4e90-ac56-53616e950fa9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.614959] env[63024]: DEBUG nova.scheduler.client.report [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2173.619665] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a271c49-ed21-479e-8a38-97e2852a60b0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.621979] env[63024]: DEBUG 
oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2173.621979] env[63024]: value = "task-1951865" [ 2173.621979] env[63024]: _type = "Task" [ 2173.621979] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.639512] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-463ec84a-bbca-4a60-9d58-d34151220a6e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.645277] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951865, 'name': CreateVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.658337] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] The volume has not been displaced from its original location: [datastore1] volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910/volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910.vmdk. No consolidation needed. {{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2173.663655] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2173.663974] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1fc2fb76-7b8e-4885-be70-1705e49c6f21 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2173.679972] env[63024]: DEBUG nova.compute.manager [req-97ed3ae2-db8c-4bcf-902b-1cd88fd5ea56 req-a6b6bef1-d484-452e-b16f-bf1662addf6b service nova] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Received event network-changed-94e31c56-bc4b-4f1b-9a4b-4311f832022a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2173.680166] env[63024]: DEBUG nova.compute.manager [req-97ed3ae2-db8c-4bcf-902b-1cd88fd5ea56 req-a6b6bef1-d484-452e-b16f-bf1662addf6b service nova] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Refreshing instance network info cache due to event network-changed-94e31c56-bc4b-4f1b-9a4b-4311f832022a. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2173.680390] env[63024]: DEBUG oslo_concurrency.lockutils [req-97ed3ae2-db8c-4bcf-902b-1cd88fd5ea56 req-a6b6bef1-d484-452e-b16f-bf1662addf6b service nova] Acquiring lock "refresh_cache-4c39a074-d380-46a3-b1cc-81d72034b743" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2173.680536] env[63024]: DEBUG oslo_concurrency.lockutils [req-97ed3ae2-db8c-4bcf-902b-1cd88fd5ea56 req-a6b6bef1-d484-452e-b16f-bf1662addf6b service nova] Acquired lock "refresh_cache-4c39a074-d380-46a3-b1cc-81d72034b743" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2173.680720] env[63024]: DEBUG nova.network.neutron [req-97ed3ae2-db8c-4bcf-902b-1cd88fd5ea56 req-a6b6bef1-d484-452e-b16f-bf1662addf6b service nova] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Refreshing network info cache for port 94e31c56-bc4b-4f1b-9a4b-4311f832022a {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2173.685224] env[63024]: DEBUG oslo_vmware.api [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2173.685224] env[63024]: value = "task-1951866" [ 2173.685224] env[63024]: _type = "Task" [ 2173.685224] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2173.692345] env[63024]: DEBUG oslo_vmware.api [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951866, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2173.755485] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 650a97b9-911e-44b0-9e82-a6d4cc95c9dd] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2173.800523] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.113906] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e018a6f3-dbc5-468e-af6a-e3ed4c950007 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.286s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2174.136723] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951865, 'name': CreateVM_Task, 'duration_secs': 0.327003} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.136901] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2174.144033] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2174.144189] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2174.144539] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2174.145305] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12df970f-5170-46e2-9bb3-aeae57fdf53e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.150089] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2174.150089] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a8c5aa-d2c6-1826-4413-2c6f74e3db05" [ 2174.150089] env[63024]: _type = "Task" [ 2174.150089] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.158722] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a8c5aa-d2c6-1826-4413-2c6f74e3db05, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.195409] env[63024]: DEBUG oslo_vmware.api [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951866, 'name': ReconfigVM_Task, 'duration_secs': 0.150649} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.195868] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2174.200472] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9dbf9987-38ba-4e21-80ca-66a5bb60c7d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.216365] env[63024]: DEBUG oslo_vmware.api [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2174.216365] env[63024]: value = "task-1951867" [ 2174.216365] env[63024]: _type = "Task" [ 2174.216365] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.224377] env[63024]: DEBUG oslo_vmware.api [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951867, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.259105] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: b7f26f0e-d5a9-42a6-8af2-065659f89cf5] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2174.313793] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "94893f45-fb96-463b-82a9-e2fd884b81f8" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2174.313793] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2174.471616] env[63024]: DEBUG nova.network.neutron [req-97ed3ae2-db8c-4bcf-902b-1cd88fd5ea56 req-a6b6bef1-d484-452e-b16f-bf1662addf6b service nova] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Updated VIF entry in instance network info cache for port 94e31c56-bc4b-4f1b-9a4b-4311f832022a. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2174.471971] env[63024]: DEBUG nova.network.neutron [req-97ed3ae2-db8c-4bcf-902b-1cd88fd5ea56 req-a6b6bef1-d484-452e-b16f-bf1662addf6b service nova] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Updating instance_info_cache with network_info: [{"id": "94e31c56-bc4b-4f1b-9a4b-4311f832022a", "address": "fa:16:3e:f4:50:6f", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap94e31c56-bc", "ovs_interfaceid": "94e31c56-bc4b-4f1b-9a4b-4311f832022a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2174.633449] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.734s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2174.639429] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.839s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2174.661924] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a8c5aa-d2c6-1826-4413-2c6f74e3db05, 'name': SearchDatastore_Task, 'duration_secs': 0.010776} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.662276] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2174.662616] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2174.662866] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2174.663030] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2174.663220] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2174.663480] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc14b053-08e1-49d3-86f0-cfac461e208b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.671871] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2174.672053] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2174.672745] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f10f6d49-d666-45a9-93ad-8f467a9a31ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.679124] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2174.679124] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d7b6b4-2519-b3e8-8380-cdd614f0402c" [ 2174.679124] env[63024]: _type = "Task" [ 2174.679124] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.690926] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d7b6b4-2519-b3e8-8380-cdd614f0402c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.727998] env[63024]: DEBUG oslo_vmware.api [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951867, 'name': ReconfigVM_Task, 'duration_secs': 0.125865} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2174.728301] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402248', 'volume_id': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'name': 'volume-f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'a483e6b5-a192-4cfe-be36-1ce0667f5697', 'attached_at': '2024-12-22T11:16:23.000000', 'detached_at': '', 'volume_id': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910', 'serial': 'f9f5f8da-d54f-41bf-9fd3-c3e75748a910'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2174.728564] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2174.729313] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccd789af-2c87-4d8f-874a-f9aacd1d1e07 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.735692] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Unregistering the VM {{(pid=63024) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2174.735907] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6e2b9855-91d2-4d58-9fec-7fc10b59ac2e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.762056] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9716d592-32d1-4f1d-b42b-1c8a7d81d2f2] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2174.813681] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2174.813992] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2174.814276] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleting the datastore file [datastore1] a483e6b5-a192-4cfe-be36-1ce0667f5697 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2174.814641] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75b9f8d4-0daf-4a64-813f-dd2e2e151da5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.817156] env[63024]: INFO nova.compute.manager [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Detaching volume d5ec318c-cfd2-4b12-bd71-32945e70712e [ 2174.826027] env[63024]: DEBUG oslo_vmware.api [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2174.826027] env[63024]: value = "task-1951869" [ 2174.826027] env[63024]: _type = "Task" [ 2174.826027] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.833848] env[63024]: DEBUG oslo_vmware.api [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951869, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.852312] env[63024]: INFO nova.virt.block_device [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Attempting to driver detach volume d5ec318c-cfd2-4b12-bd71-32945e70712e from mountpoint /dev/sdb [ 2174.852888] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Volume detach. Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2174.852888] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402263', 'volume_id': 'd5ec318c-cfd2-4b12-bd71-32945e70712e', 'name': 'volume-d5ec318c-cfd2-4b12-bd71-32945e70712e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94893f45-fb96-463b-82a9-e2fd884b81f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'd5ec318c-cfd2-4b12-bd71-32945e70712e', 'serial': 'd5ec318c-cfd2-4b12-bd71-32945e70712e'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2174.853850] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83fef848-2bc7-44fd-b473-035d7ee1dcf2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.874658] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29de3577-f2c8-4e5f-93f0-dea06da9fbde {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.881725] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c240e6-25e8-405f-a43e-ffb8566c8836 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.901526] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56ca039-4706-442b-a05a-cd448ca21b09 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.916999] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] The volume has not been displaced from its original location: [datastore1] volume-d5ec318c-cfd2-4b12-bd71-32945e70712e/volume-d5ec318c-cfd2-4b12-bd71-32945e70712e.vmdk. No consolidation needed. 
{{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2174.922119] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2174.922734] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-59c66ea8-c8a7-42fb-a776-6f41383f7dce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2174.940870] env[63024]: DEBUG oslo_vmware.api [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2174.940870] env[63024]: value = "task-1951870" [ 2174.940870] env[63024]: _type = "Task" [ 2174.940870] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2174.948795] env[63024]: DEBUG oslo_vmware.api [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951870, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2174.974365] env[63024]: DEBUG oslo_concurrency.lockutils [req-97ed3ae2-db8c-4bcf-902b-1cd88fd5ea56 req-a6b6bef1-d484-452e-b16f-bf1662addf6b service nova] Releasing lock "refresh_cache-4c39a074-d380-46a3-b1cc-81d72034b743" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2175.147671] env[63024]: DEBUG nova.objects.instance [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'migration_context' on Instance uuid cb038d54-b785-4930-b8a5-b309c5f4b58d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2175.189807] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d7b6b4-2519-b3e8-8380-cdd614f0402c, 'name': SearchDatastore_Task, 'duration_secs': 0.010899} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.190628] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bd397aa-6371-45d5-82c5-dc80a1a83d8a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.196424] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2175.196424] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e0ea31-f7f0-4da0-129d-8b2a3fd766c6" [ 2175.196424] env[63024]: _type = "Task" [ 2175.196424] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.207407] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e0ea31-f7f0-4da0-129d-8b2a3fd766c6, 'name': SearchDatastore_Task, 'duration_secs': 0.009365} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.207639] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2175.207885] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 4c39a074-d380-46a3-b1cc-81d72034b743/4c39a074-d380-46a3-b1cc-81d72034b743.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2175.208139] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd0aa871-aea3-478c-aba5-d327fd943e3a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.214349] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2175.214349] env[63024]: value = "task-1951871" [ 2175.214349] env[63024]: _type = "Task" [ 2175.214349] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.221583] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951871, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.265614] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: f1b1cbe0-0ad3-41ee-9b8c-cf975ec500df] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}} [ 2175.335332] env[63024]: DEBUG oslo_vmware.api [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951869, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.078241} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.335614] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2175.335833] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2175.336021] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2175.336202] env[63024]: INFO nova.compute.manager [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Took 2.30 seconds to destroy the instance on the hypervisor. [ 2175.336444] env[63024]: DEBUG oslo.service.loopingcall [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2175.336638] env[63024]: DEBUG nova.compute.manager [-] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2175.336730] env[63024]: DEBUG nova.network.neutron [-] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2175.450892] env[63024]: DEBUG oslo_vmware.api [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951870, 'name': ReconfigVM_Task, 'duration_secs': 0.225104} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.451200] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2175.457028] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2388aa7-a504-4a4e-a19b-175627d26676 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.479259] env[63024]: DEBUG oslo_vmware.api [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2175.479259] env[63024]: value = "task-1951872" [ 2175.479259] env[63024]: _type = "Task" [ 2175.479259] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.490504] env[63024]: DEBUG oslo_vmware.api [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951872, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.727479] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951871, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48946} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2175.727774] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 4c39a074-d380-46a3-b1cc-81d72034b743/4c39a074-d380-46a3-b1cc-81d72034b743.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2175.727997] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2175.728332] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7acf1ce5-54bc-4583-8c9c-374284551fbc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.734790] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2175.734790] env[63024]: value = "task-1951873" [ 2175.734790] env[63024]: _type = "Task" [ 2175.734790] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2175.743928] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951873, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2175.824958] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d71636c1-a325-4f11-be0c-e29f75d97cd7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.832934] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-414a2a94-9356-45aa-91ac-ab8a70c487ed {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.864762] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9c3bbe0-5567-4a88-ba2c-90472306e16c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.872081] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62fe6e4-2973-4fe0-93b8-6f3c6ac32c31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2175.885706] env[63024]: DEBUG nova.compute.provider_tree [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2175.952607] env[63024]: DEBUG nova.compute.manager [req-49bbd737-c45e-4854-af1f-33a14f771221 req-878c91f6-ac0e-4f3c-b47d-c5e0cb28d393 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Received event network-vif-deleted-d9aaf9d8-7a03-4fe5-8494-40497e138b13 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2175.952665] env[63024]: INFO nova.compute.manager [req-49bbd737-c45e-4854-af1f-33a14f771221 req-878c91f6-ac0e-4f3c-b47d-c5e0cb28d393 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Neutron deleted interface d9aaf9d8-7a03-4fe5-8494-40497e138b13; detaching it from the instance and deleting it from the info cache [ 2175.952807] env[63024]: DEBUG nova.network.neutron [req-49bbd737-c45e-4854-af1f-33a14f771221 req-878c91f6-ac0e-4f3c-b47d-c5e0cb28d393 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2175.989567] env[63024]: DEBUG oslo_vmware.api [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951872, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.175541] env[63024]: INFO nova.compute.manager [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Swapping old allocation on dict_keys(['89dfa68a-133e-436f-a9f1-86051f9fb96b']) held by migration ff148bac-fa34-4401-a31b-fdad842b3d7a for instance [ 2176.196873] env[63024]: DEBUG nova.scheduler.client.report [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Overwriting current allocation {'allocations': {'89dfa68a-133e-436f-a9f1-86051f9fb96b': {'resources': {'VCPU': 1, 'MEMORY_MB': 256, 'DISK_GB': 1}, 'generation': 177}}, 'project_id': '6072e8931d9540ad8fe4a2b4b1ec782d', 'user_id': '28fab1e92c1d4491986100983f6b4ab1', 'consumer_generation': 1} on consumer fa326fe2-c00e-4379-954a-9b3275328abc {{(pid=63024) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 2176.247036] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951873, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061992} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.247036] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2176.247786] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a42b4c4-e567-4704-acbc-e99b1ca2ba5b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.268613] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 4c39a074-d380-46a3-b1cc-81d72034b743/4c39a074-d380-46a3-b1cc-81d72034b743.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2176.268905] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10f33064-2f1b-44a3-81c4-90e896b38c61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.282988] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2176.283166] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock 
"refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2176.283341] env[63024]: DEBUG nova.network.neutron [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2176.290453] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2176.290453] env[63024]: value = "task-1951874" [ 2176.290453] env[63024]: _type = "Task" [ 2176.290453] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.300053] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951874, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.390285] env[63024]: DEBUG nova.scheduler.client.report [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2176.431176] env[63024]: DEBUG nova.network.neutron [-] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2176.454785] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f153130e-6a94-492b-8716-8f9409fa4a44 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.464392] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9540b1be-f092-479a-9545-100de3371617 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.488430] env[63024]: DEBUG oslo_vmware.api [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951872, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.496528] env[63024]: DEBUG nova.compute.manager [req-49bbd737-c45e-4854-af1f-33a14f771221 req-878c91f6-ac0e-4f3c-b47d-c5e0cb28d393 service nova] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Detach interface failed, port_id=d9aaf9d8-7a03-4fe5-8494-40497e138b13, reason: Instance a483e6b5-a192-4cfe-be36-1ce0667f5697 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2176.800212] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951874, 'name': ReconfigVM_Task, 'duration_secs': 0.283456} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.800539] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 4c39a074-d380-46a3-b1cc-81d72034b743/4c39a074-d380-46a3-b1cc-81d72034b743.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2176.801155] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-07281f19-e594-4791-acb3-b92b52c37967 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2176.808782] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2176.808782] env[63024]: value = "task-1951875" [ 2176.808782] env[63024]: _type = "Task" [ 2176.808782] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2176.816504] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951875, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2176.934322] env[63024]: INFO nova.compute.manager [-] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Took 1.60 seconds to deallocate network for instance. [ 2176.991216] env[63024]: DEBUG oslo_vmware.api [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951872, 'name': ReconfigVM_Task, 'duration_secs': 1.151386} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2176.991514] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402263', 'volume_id': 'd5ec318c-cfd2-4b12-bd71-32945e70712e', 'name': 'volume-d5ec318c-cfd2-4b12-bd71-32945e70712e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '94893f45-fb96-463b-82a9-e2fd884b81f8', 'attached_at': '', 'detached_at': '', 'volume_id': 'd5ec318c-cfd2-4b12-bd71-32945e70712e', 'serial': 'd5ec318c-cfd2-4b12-bd71-32945e70712e'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2177.058688] env[63024]: DEBUG nova.network.neutron [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance_info_cache with network_info: [{"id": "a13f8ab3-d900-447f-8772-5be6b3d48296", "address": "fa:16:3e:47:7f:73", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa13f8ab3-d9", "ovs_interfaceid": "a13f8ab3-d900-447f-8772-5be6b3d48296", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2177.319128] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951875, 'name': Rename_Task, 'duration_secs': 0.133157} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.319405] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2177.319647] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5ebb2c97-5b31-4796-b924-500f4bae8d9f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.325912] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2177.325912] env[63024]: value = "task-1951876" [ 2177.325912] env[63024]: _type = "Task" [ 2177.325912] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.334993] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951876, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.400910] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.761s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2177.477597] env[63024]: INFO nova.compute.manager [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Took 0.54 seconds to detach 1 volumes for instance. 
[ 2177.479925] env[63024]: DEBUG nova.compute.manager [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Deleting volume: f9f5f8da-d54f-41bf-9fd3-c3e75748a910 {{(pid=63024) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 2177.543024] env[63024]: DEBUG nova.objects.instance [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lazy-loading 'flavor' on Instance uuid 94893f45-fb96-463b-82a9-e2fd884b81f8 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2177.561142] env[63024]: DEBUG oslo_concurrency.lockutils [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-fa326fe2-c00e-4379-954a-9b3275328abc" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2177.561793] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2177.562170] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-44445022-60c7-41e9-85f7-93495f5bc8b5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2177.570242] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2177.570242] env[63024]: value = "task-1951878" [ 2177.570242] env[63024]: _type = "Task" [ 2177.570242] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2177.579643] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951878, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2177.837056] env[63024]: DEBUG oslo_vmware.api [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951876, 'name': PowerOnVM_Task, 'duration_secs': 0.470753} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2177.837056] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2177.837056] env[63024]: INFO nova.compute.manager [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Took 6.89 seconds to spawn the instance on the hypervisor. [ 2177.837056] env[63024]: DEBUG nova.compute.manager [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2177.837765] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b385ac33-d8bb-452e-b193-184c55785041 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.017630] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2178.018110] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2178.018444] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2178.040851] env[63024]: INFO nova.scheduler.client.report [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted allocations for instance a483e6b5-a192-4cfe-be36-1ce0667f5697 [ 2178.081148] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951878, 'name': PowerOffVM_Task, 'duration_secs': 0.19715} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.081443] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2178.082138] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2178.082357] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2178.082544] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2178.082733] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2178.082880] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2178.083042] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2178.084012] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2178.084012] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2178.084012] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2178.084012] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2178.084012] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2178.089508] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6de7849f-e2dc-441c-b718-4c44ed211923 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.104495] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2178.104495] env[63024]: value = "task-1951879" [ 2178.104495] env[63024]: _type = "Task" [ 2178.104495] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.112378] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951879, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.356058] env[63024]: INFO nova.compute.manager [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Took 11.66 seconds to build instance. 
[ 2178.550143] env[63024]: DEBUG oslo_concurrency.lockutils [None req-21452656-991f-4bde-80d3-5e025d33f282 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.236s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2178.551387] env[63024]: DEBUG oslo_concurrency.lockutils [None req-a4fa07a0-c4f4-4068-9cb1-bf18695837ac tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "a483e6b5-a192-4cfe-be36-1ce0667f5697" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.027s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2178.614060] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951879, 'name': ReconfigVM_Task, 'duration_secs': 0.122449} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2178.615059] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafb8395-6a29-4b61-862f-d1203eecd46f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.632857] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2178.633102] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2178.633274] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2178.633451] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2178.633600] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 
tempest-ServerActionsTestJSON-64550346-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2178.633747] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2178.633951] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2178.634122] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2178.634292] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2178.634456] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2178.634657] env[63024]: DEBUG nova.virt.hardware [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2178.635411] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdbc7a5f-676a-439f-b01a-60ded78acbfa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2178.641260] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2178.641260] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5260ddc8-be82-23d2-4445-3801ec36c08d" [ 2178.641260] env[63024]: _type = "Task" [ 2178.641260] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2178.649617] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5260ddc8-be82-23d2-4445-3801ec36c08d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2178.860690] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6d55a938-1020-435e-a177-834d82345904 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "4c39a074-d380-46a3-b1cc-81d72034b743" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.175s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2178.938953] env[63024]: INFO nova.compute.manager [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Swapping old allocation on dict_keys(['89dfa68a-133e-436f-a9f1-86051f9fb96b']) held by migration 6b493773-408a-454d-801f-b4476cfe41cb for instance [ 2178.963308] env[63024]: DEBUG nova.scheduler.client.report [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Overwriting current allocation {'allocations': {'89dfa68a-133e-436f-a9f1-86051f9fb96b': {'resources': {'VCPU': 1, 'MEMORY_MB': 256, 'DISK_GB': 1}, 'generation': 178}}, 'project_id': '0e53c02ad56640dc8cbc8839669b67bf', 'user_id': '27151e89c8ee4ddd9285bff3795a82b2', 'consumer_generation': 1} on consumer cb038d54-b785-4930-b8a5-b309c5f4b58d {{(pid=63024) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2033}} [ 2179.063519] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2179.063519] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2179.063519] env[63024]: DEBUG nova.network.neutron [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2179.152878] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5260ddc8-be82-23d2-4445-3801ec36c08d, 'name': SearchDatastore_Task, 'duration_secs': 0.007534} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.161095] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Reconfiguring VM instance instance-0000006e to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2179.162029] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5965a3b7-0527-4b99-b3d1-02d976262032 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.182228] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2179.182228] env[63024]: value = "task-1951880" [ 2179.182228] env[63024]: _type = "Task" [ 2179.182228] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.190444] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951880, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.490073] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1963b51-736e-4577-a265-5872e0d59006 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "4c39a074-d380-46a3-b1cc-81d72034b743" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.490247] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1963b51-736e-4577-a265-5872e0d59006 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "4c39a074-d380-46a3-b1cc-81d72034b743" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.490299] env[63024]: DEBUG nova.compute.manager [None req-d1963b51-736e-4577-a265-5872e0d59006 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2179.491247] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b410a82a-5c86-4df2-b6c7-fa63bbd1adca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.498239] env[63024]: DEBUG nova.compute.manager [None req-d1963b51-736e-4577-a265-5872e0d59006 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63024) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2179.498787] 
env[63024]: DEBUG nova.objects.instance [None req-d1963b51-736e-4577-a265-5872e0d59006 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lazy-loading 'flavor' on Instance uuid 4c39a074-d380-46a3-b1cc-81d72034b743 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2179.575387] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "0d253199-adf8-45c0-a6bf-b11c12b08688" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.575570] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "0d253199-adf8-45c0-a6bf-b11c12b08688" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.575726] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "0d253199-adf8-45c0-a6bf-b11c12b08688-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.575914] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "0d253199-adf8-45c0-a6bf-b11c12b08688-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.576102] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "0d253199-adf8-45c0-a6bf-b11c12b08688-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.578130] env[63024]: INFO nova.compute.manager [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Terminating instance [ 2179.639363] env[63024]: DEBUG oslo_concurrency.lockutils [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "94893f45-fb96-463b-82a9-e2fd884b81f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.639609] env[63024]: DEBUG oslo_concurrency.lockutils [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 
tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.639813] env[63024]: DEBUG oslo_concurrency.lockutils [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "94893f45-fb96-463b-82a9-e2fd884b81f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2179.640009] env[63024]: DEBUG oslo_concurrency.lockutils [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2179.640186] env[63024]: DEBUG oslo_concurrency.lockutils [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2179.642164] env[63024]: INFO nova.compute.manager [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Terminating instance [ 2179.693956] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951880, 'name': ReconfigVM_Task, 'duration_secs': 0.185004} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2179.694241] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Reconfigured VM instance instance-0000006e to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2179.695061] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc80167a-4606-45d2-aeaa-6580c415a478 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.716986] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] fa326fe2-c00e-4379-954a-9b3275328abc/fa326fe2-c00e-4379-954a-9b3275328abc.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2179.719705] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8c14a47-48a5-42c2-8cda-0d43c078d9ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2179.738500] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2179.738500] env[63024]: value = "task-1951881" [ 2179.738500] env[63024]: _type = "Task" [ 2179.738500] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2179.746241] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951881, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2179.796627] env[63024]: DEBUG nova.network.neutron [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance_info_cache with network_info: [{"id": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "address": "fa:16:3e:a9:0a:72", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf5649c3a-8b", "ovs_interfaceid": "f5649c3a-8bab-4abb-a1a2-1d88b780eba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2180.082467] env[63024]: DEBUG nova.compute.manager [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2180.082782] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2180.083659] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3284b661-1edd-4533-afc6-786b69eb246f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.091477] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2180.091733] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0f1080e1-88c7-427f-ab79-43ba12a6d9af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.098557] env[63024]: DEBUG oslo_vmware.api [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2180.098557] env[63024]: value = "task-1951882" [ 2180.098557] env[63024]: _type = "Task" [ 2180.098557] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.107393] env[63024]: DEBUG oslo_vmware.api [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951882, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.145830] env[63024]: DEBUG nova.compute.manager [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2180.146108] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2180.147036] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-197abd05-5098-491c-81af-6a25d8d4d10b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.155012] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2180.155357] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ea9040a-3149-4cbc-bc46-b90443cdad20 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.162658] env[63024]: DEBUG oslo_vmware.api [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2180.162658] env[63024]: value = "task-1951883" [ 2180.162658] env[63024]: _type = "Task" [ 2180.162658] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.171344] env[63024]: DEBUG oslo_vmware.api [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951883, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.251129] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951881, 'name': ReconfigVM_Task, 'duration_secs': 0.243204} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.251611] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Reconfigured VM instance instance-0000006e to attach disk [datastore1] fa326fe2-c00e-4379-954a-9b3275328abc/fa326fe2-c00e-4379-954a-9b3275328abc.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2180.253052] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3837e70a-9188-4520-a7eb-bcb19ae882c2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.274797] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b02cf7ac-7b9a-473d-94c5-9f1348ea01ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.295571] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87135c27-db75-4566-b458-0e618729ac27 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.299160] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-cb038d54-b785-4930-b8a5-b309c5f4b58d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2180.300106] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0795a55-bdcf-4e56-a038-aac37c06b124 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.322503] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deef1c26-d257-4ecb-8a1b-9227d02bedb5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.326223] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4d8282-290b-4ddd-a20d-c0af495eb4e6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.334100] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2180.334393] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7aa13405-4ce6-412b-961e-0048fd237562 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.344571] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2180.344571] env[63024]: value = "task-1951884" [ 2180.344571] 
env[63024]: _type = "Task" [ 2180.344571] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.354508] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951884, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.507655] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1963b51-736e-4577-a265-5872e0d59006 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2180.508080] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0a82228d-bf5f-47e0-a8eb-1b5eda04fa67 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.516877] env[63024]: DEBUG oslo_vmware.api [None req-d1963b51-736e-4577-a265-5872e0d59006 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2180.516877] env[63024]: value = "task-1951885" [ 2180.516877] env[63024]: _type = "Task" [ 2180.516877] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.525407] env[63024]: DEBUG oslo_vmware.api [None req-d1963b51-736e-4577-a265-5872e0d59006 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951885, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.608661] env[63024]: DEBUG oslo_vmware.api [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951882, 'name': PowerOffVM_Task, 'duration_secs': 0.207145} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.609042] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2180.609240] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2180.609560] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-90b17690-6212-43a7-9cd6-3b246892f361 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.675453] env[63024]: DEBUG oslo_vmware.api [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951883, 'name': PowerOffVM_Task, 'duration_secs': 0.180516} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.675784] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2180.676046] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2180.676387] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fcd68b91-e60c-449f-9a91-7adfde83fb24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.822612] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2180.822845] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2180.823040] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleting the datastore file [datastore1] 0d253199-adf8-45c0-a6bf-b11c12b08688 {{(pid=63024) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2180.823315] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e0614c33-568b-4b84-a3d6-918be21fc802 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.829482] env[63024]: DEBUG oslo_vmware.api [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2180.829482] env[63024]: value = "task-1951888" [ 2180.829482] env[63024]: _type = "Task" [ 2180.829482] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.837689] env[63024]: DEBUG oslo_vmware.api [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951888, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2180.838792] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2180.838989] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2180.839183] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Deleting the datastore file [datastore1] 94893f45-fb96-463b-82a9-e2fd884b81f8 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2180.839417] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-76037dc2-f5cf-4f93-b1b4-3ddca5a13265 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2180.845500] env[63024]: DEBUG oslo_vmware.api [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2180.845500] env[63024]: value = "task-1951889" [ 2180.845500] env[63024]: _type = "Task" [ 2180.845500] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2180.855817] env[63024]: DEBUG oslo_vmware.api [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951884, 'name': PowerOnVM_Task, 'duration_secs': 0.353556} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2180.858544] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2180.861775] env[63024]: DEBUG oslo_vmware.api [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951889, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.025966] env[63024]: DEBUG oslo_vmware.api [None req-d1963b51-736e-4577-a265-5872e0d59006 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951885, 'name': PowerOffVM_Task, 'duration_secs': 0.175527} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2181.026249] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1963b51-736e-4577-a265-5872e0d59006 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2181.026453] env[63024]: DEBUG nova.compute.manager [None req-d1963b51-736e-4577-a265-5872e0d59006 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2181.027190] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eebdde0-774e-44ba-bb22-c85a14e3be7f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.342236] env[63024]: DEBUG oslo_vmware.api [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951888, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145315} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2181.342627] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2181.342894] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2181.343182] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2181.343442] env[63024]: INFO nova.compute.manager [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Took 1.26 seconds to destroy the instance on the hypervisor. [ 2181.343828] env[63024]: DEBUG oslo.service.loopingcall [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2181.344053] env[63024]: DEBUG nova.compute.manager [-] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2181.344202] env[63024]: DEBUG nova.network.neutron [-] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2181.360661] env[63024]: DEBUG oslo_vmware.api [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951889, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144427} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2181.361097] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2181.361464] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2181.361711] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2181.361992] env[63024]: INFO nova.compute.manager [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Took 1.22 seconds to destroy the instance on the hypervisor. [ 2181.362361] env[63024]: DEBUG oslo.service.loopingcall [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2181.362737] env[63024]: DEBUG nova.compute.manager [-] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2181.362894] env[63024]: DEBUG nova.network.neutron [-] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2181.411432] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2181.411988] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4d37c209-ce25-4641-9299-ce0ea10de574 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.418856] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2181.418856] env[63024]: value = "task-1951890" [ 2181.418856] env[63024]: _type = "Task" [ 2181.418856] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2181.426999] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951890, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2181.538034] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d1963b51-736e-4577-a265-5872e0d59006 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "4c39a074-d380-46a3-b1cc-81d72034b743" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.873730] env[63024]: INFO nova.compute.manager [None req-ec764318-d668-46cf-8e0e-823dba733bb9 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance to original state: 'active' [ 2181.881102] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "4c39a074-d380-46a3-b1cc-81d72034b743" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.881102] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "4c39a074-d380-46a3-b1cc-81d72034b743" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.881102] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "4c39a074-d380-46a3-b1cc-81d72034b743-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2181.881102] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "4c39a074-d380-46a3-b1cc-81d72034b743-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2181.881102] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "4c39a074-d380-46a3-b1cc-81d72034b743-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2181.882764] env[63024]: INFO nova.compute.manager [None 
req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Terminating instance [ 2181.928580] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951890, 'name': PowerOffVM_Task, 'duration_secs': 0.427235} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2181.928843] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2181.929508] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2181.929751] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2181.929919] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2181.930121] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2181.930274] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2181.930426] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2181.930635] env[63024]: DEBUG nova.virt.hardware [None 
req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2181.930798] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2181.931034] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2181.931170] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2181.931347] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2181.936548] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e0d0e73f-8557-48e7-813f-1e69c906da8b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2181.953214] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2181.953214] env[63024]: value = "task-1951891" [ 2181.953214] env[63024]: _type = "Task" [ 2181.953214] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2181.961536] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951891, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.072357] env[63024]: DEBUG nova.compute.manager [req-b0f53df2-7963-4742-84fc-5a516fbc909b req-15f9e69d-bc8c-48b5-a21f-d3395edf008f service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Received event network-vif-deleted-5d2fbce5-45d5-4e27-bc41-6f77e528f245 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2182.072472] env[63024]: INFO nova.compute.manager [req-b0f53df2-7963-4742-84fc-5a516fbc909b req-15f9e69d-bc8c-48b5-a21f-d3395edf008f service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Neutron deleted interface 5d2fbce5-45d5-4e27-bc41-6f77e528f245; detaching it from the instance and deleting it from the info cache [ 2182.072738] env[63024]: DEBUG nova.network.neutron [req-b0f53df2-7963-4742-84fc-5a516fbc909b req-15f9e69d-bc8c-48b5-a21f-d3395edf008f service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2182.386693] env[63024]: DEBUG nova.compute.manager [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2182.386992] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2182.387825] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb82631b-6d70-405d-ae46-6ea96e548a6a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.396317] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2182.396583] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4273d10-ca63-4a3f-9b52-f450ce8023dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.463013] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951891, 'name': ReconfigVM_Task, 'duration_secs': 0.333453} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2182.464033] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a59567-330d-4ea0-afc9-9b096814186c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.485670] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2182.486035] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2182.486134] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2182.486267] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2182.486413] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2182.486654] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2182.486765] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2182.487043] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2182.487115] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2182.487270] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2182.487445] env[63024]: DEBUG nova.virt.hardware [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2182.488225] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0799f32c-12e3-409d-a7c8-c28c6f1633a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.493365] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2182.493365] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fabf0d-959f-7a91-0e50-868f00cd66d2" [ 2182.493365] env[63024]: _type = "Task" [ 2182.493365] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2182.504326] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fabf0d-959f-7a91-0e50-868f00cd66d2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2182.547928] env[63024]: DEBUG nova.network.neutron [-] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2182.575808] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-478dc87b-6890-425c-a4ce-d92c5af2b79a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.585248] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73b0cc24-910b-4910-a32c-4a029ec4c900 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2182.614310] env[63024]: DEBUG nova.compute.manager [req-b0f53df2-7963-4742-84fc-5a516fbc909b req-15f9e69d-bc8c-48b5-a21f-d3395edf008f service nova] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Detach interface failed, port_id=5d2fbce5-45d5-4e27-bc41-6f77e528f245, reason: Instance 0d253199-adf8-45c0-a6bf-b11c12b08688 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2182.778918] env[63024]: DEBUG nova.network.neutron [-] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2183.003407] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52fabf0d-959f-7a91-0e50-868f00cd66d2, 'name': SearchDatastore_Task, 'duration_secs': 0.008122} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.008718] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfiguring VM instance instance-0000006d to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2183.009013] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b630a95e-34d2-464c-8efd-3a0ed2786c91 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.028786] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2183.028786] env[63024]: value = "task-1951893" [ 2183.028786] env[63024]: _type = "Task" [ 2183.028786] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.036685] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951893, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.051598] env[63024]: INFO nova.compute.manager [-] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Took 1.71 seconds to deallocate network for instance. 
[ 2183.072913] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "fa326fe2-c00e-4379-954a-9b3275328abc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2183.073175] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "fa326fe2-c00e-4379-954a-9b3275328abc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2183.073379] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "fa326fe2-c00e-4379-954a-9b3275328abc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2183.073565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "fa326fe2-c00e-4379-954a-9b3275328abc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2183.073735] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "fa326fe2-c00e-4379-954a-9b3275328abc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2183.075586] env[63024]: INFO nova.compute.manager [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Terminating instance [ 2183.282901] env[63024]: INFO nova.compute.manager [-] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Took 1.92 seconds to deallocate network for instance. [ 2183.538514] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951893, 'name': ReconfigVM_Task, 'duration_secs': 0.189518} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2183.538876] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfigured VM instance instance-0000006d to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2183.539556] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ddfc08-eaab-47c2-bde2-ba1a3f926a4a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.559954] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2183.560229] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2183.560450] env[63024]: DEBUG nova.objects.instance [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lazy-loading 'resources' on Instance uuid 0d253199-adf8-45c0-a6bf-b11c12b08688 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2183.568542] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] cb038d54-b785-4930-b8a5-b309c5f4b58d/cb038d54-b785-4930-b8a5-b309c5f4b58d.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2183.569539] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-570b2d9a-efc9-4331-a4e4-f6f0f1f9a118 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.583123] env[63024]: DEBUG nova.compute.manager [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2183.583337] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2183.584093] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b83bd52-2123-4f00-8265-97569377b9fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.592270] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2183.592497] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2c7ccb23-84c8-404c-9c03-10e8a18442ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.594824] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2183.594824] env[63024]: value = "task-1951894" [ 2183.594824] env[63024]: _type = "Task" [ 2183.594824] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.602543] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951894, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.603624] env[63024]: DEBUG oslo_vmware.api [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2183.603624] env[63024]: value = "task-1951895" [ 2183.603624] env[63024]: _type = "Task" [ 2183.603624] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.610811] env[63024]: DEBUG oslo_vmware.api [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951895, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2183.789881] env[63024]: DEBUG oslo_concurrency.lockutils [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2184.097504] env[63024]: DEBUG nova.compute.manager [req-fe3577e7-02ac-4b89-a760-78cbb57fab07 req-4226c795-95c8-4023-8df3-09bc461355b9 service nova] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Received event network-vif-deleted-61d84146-ea46-4ba6-ab7a-7e81dec991d2 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2184.110268] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951894, 'name': ReconfigVM_Task, 'duration_secs': 0.327} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.110866] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfigured VM instance instance-0000006d to attach disk [datastore1] cb038d54-b785-4930-b8a5-b309c5f4b58d/cb038d54-b785-4930-b8a5-b309c5f4b58d.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2184.111722] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71685eaa-06e4-4d36-8841-c85057901728 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.118722] env[63024]: DEBUG oslo_vmware.api [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951895, 'name': PowerOffVM_Task, 'duration_secs': 0.154715} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.119729] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2184.119882] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2184.120151] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60c454da-9deb-4e2b-be67-4d70ba971ca7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.140053] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4554a45-97c2-4e88-85a7-ea6c5f2d4a20 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.162927] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c87c01b6-c7ce-4589-9264-346db4add61c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.186790] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91849bc-3b9a-4776-921b-91da15bbd359 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.193345] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2184.193574] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7976fe4-3537-47b0-a122-32a3d89aec4d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.200272] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2184.200272] env[63024]: value = "task-1951897" [ 2184.200272] env[63024]: _type = "Task" [ 2184.200272] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.208298] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49ff69f-6aa7-4385-b1be-c5f52e1ff993 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.213339] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951897, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.217474] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94613e4-0099-4178-9b8f-74464317ca1c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.246865] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c42d92a-6df9-44fd-88e5-ccd951205549 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.253761] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8d2b5a7-0c14-4b93-bbeb-6258233488c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.266518] env[63024]: DEBUG nova.compute.provider_tree [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2184.710436] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951897, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.769709] env[63024]: DEBUG nova.scheduler.client.report [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2185.211714] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951897, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2185.275170] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.715s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2185.278146] env[63024]: DEBUG oslo_concurrency.lockutils [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.488s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2185.279755] env[63024]: DEBUG nova.objects.instance [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lazy-loading 'resources' on Instance uuid 94893f45-fb96-463b-82a9-e2fd884b81f8 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2185.299381] env[63024]: INFO nova.scheduler.client.report [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted allocations for instance 0d253199-adf8-45c0-a6bf-b11c12b08688 [ 2185.711366] env[63024]: DEBUG oslo_vmware.api [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951897, 'name': PowerOnVM_Task, 'duration_secs': 1.032694} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2185.712450] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2185.805486] env[63024]: DEBUG oslo_concurrency.lockutils [None req-796be5ed-bb0c-42a4-a64b-81b73897b8b2 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "0d253199-adf8-45c0-a6bf-b11c12b08688" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.230s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2185.867881] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d00ccfc-dca4-42e0-9978-2fc8575eead5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.876254] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2e060fc-4b65-4457-9ef3-de20cf1a78a0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.906066] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e87858-b9a9-4479-9924-ae708dbc87f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.913410] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6edbac9-498c-4cd8-b4ac-5b162db7cbe7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.926774] env[63024]: DEBUG nova.compute.provider_tree [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2186.430393] env[63024]: DEBUG nova.scheduler.client.report [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2186.751229] env[63024]: INFO nova.compute.manager [None req-3d29a71a-2179-4912-bc1d-66fee32012e7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance to original state: 'active' [ 2186.935745] env[63024]: DEBUG oslo_concurrency.lockutils [None 
req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.658s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2186.956643] env[63024]: INFO nova.scheduler.client.report [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Deleted allocations for instance 94893f45-fb96-463b-82a9-e2fd884b81f8 [ 2187.465544] env[63024]: DEBUG oslo_concurrency.lockutils [None req-27dc3fb1-4402-4dea-9e09-4390c7f5499e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "94893f45-fb96-463b-82a9-e2fd884b81f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.825s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2187.513387] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2187.513691] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2187.513944] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleting the datastore file [datastore1] fa326fe2-c00e-4379-954a-9b3275328abc {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2187.514295] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0e9780a-5a96-449b-9473-3f078cbdc9d5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.521786] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2187.522070] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2187.522321] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleting the datastore file [datastore1] 4c39a074-d380-46a3-b1cc-81d72034b743 {{(pid=63024) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2187.523855] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be4fce08-9407-4bca-9e20-8e2cdf978cd2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2187.526307] env[63024]: DEBUG oslo_vmware.api [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2187.526307] env[63024]: value = "task-1951899" [ 2187.526307] env[63024]: _type = "Task" [ 2187.526307] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.531990] env[63024]: DEBUG oslo_vmware.api [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2187.531990] env[63024]: value = "task-1951900" [ 2187.531990] env[63024]: _type = "Task" [ 2187.531990] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2187.538128] env[63024]: DEBUG oslo_vmware.api [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951899, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2187.542753] env[63024]: DEBUG oslo_vmware.api [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951900, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.039080] env[63024]: DEBUG oslo_vmware.api [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951899, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144111} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.039660] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2188.039853] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2188.040075] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2188.040261] env[63024]: INFO nova.compute.manager [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Took 4.46 seconds to destroy the instance on the hypervisor. [ 2188.040496] env[63024]: DEBUG oslo.service.loopingcall [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2188.040682] env[63024]: DEBUG nova.compute.manager [-] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2188.040778] env[63024]: DEBUG nova.network.neutron [-] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2188.045254] env[63024]: DEBUG oslo_vmware.api [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951900, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131419} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2188.045749] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2188.045959] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2188.046124] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2188.046325] env[63024]: INFO nova.compute.manager [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Took 5.66 seconds to destroy the instance on the hypervisor. [ 2188.046519] env[63024]: DEBUG oslo.service.loopingcall [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2188.046689] env[63024]: DEBUG nova.compute.manager [-] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2188.046827] env[63024]: DEBUG nova.network.neutron [-] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2188.098785] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "cb038d54-b785-4930-b8a5-b309c5f4b58d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.099051] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.099269] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "cb038d54-b785-4930-b8a5-b309c5f4b58d-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2188.099454] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2188.099622] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2188.101558] env[63024]: INFO nova.compute.manager [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Terminating instance [ 2188.605453] env[63024]: DEBUG nova.compute.manager [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2188.605837] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2188.606194] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9d50ab8-fd65-44ba-808b-6a65616f7e0e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2188.614252] env[63024]: DEBUG oslo_vmware.api [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2188.614252] env[63024]: value = "task-1951901" [ 2188.614252] env[63024]: _type = "Task" [ 2188.614252] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2188.625255] env[63024]: DEBUG oslo_vmware.api [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951901, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2188.933892] env[63024]: DEBUG nova.compute.manager [req-776f4d2b-615b-4271-b798-e41d73b4989f req-04dba362-6d5b-4fdf-b1b0-e71c3f0627d3 service nova] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Received event network-vif-deleted-94e31c56-bc4b-4f1b-9a4b-4311f832022a {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2188.933892] env[63024]: INFO nova.compute.manager [req-776f4d2b-615b-4271-b798-e41d73b4989f req-04dba362-6d5b-4fdf-b1b0-e71c3f0627d3 service nova] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Neutron deleted interface 94e31c56-bc4b-4f1b-9a4b-4311f832022a; detaching it from the instance and deleting it from the info cache [ 2188.934084] env[63024]: DEBUG nova.network.neutron [req-776f4d2b-615b-4271-b798-e41d73b4989f req-04dba362-6d5b-4fdf-b1b0-e71c3f0627d3 service nova] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2189.024112] env[63024]: DEBUG nova.network.neutron [-] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2189.127849] env[63024]: DEBUG oslo_vmware.api [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951901, 'name': PowerOffVM_Task, 'duration_secs': 0.191058} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.128363] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2189.128566] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2189.128757] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402259', 'volume_id': '6a11f033-eb79-45c4-ad3c-793e965649b3', 'name': 'volume-6a11f033-eb79-45c4-ad3c-793e965649b3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'cb038d54-b785-4930-b8a5-b309c5f4b58d', 'attached_at': '2024-12-22T11:17:08.000000', 'detached_at': '', 'volume_id': '6a11f033-eb79-45c4-ad3c-793e965649b3', 'serial': '6a11f033-eb79-45c4-ad3c-793e965649b3'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2189.129797] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de654833-7deb-49a5-ae35-4308abac3370 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.154503] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53884352-082c-4f26-8af5-586015883872 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.162313] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3fb63e-c63a-4ad8-9924-ee6cf41356c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.183667] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-885bba44-f57b-422c-b7cb-5ed2e91a1ab5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.201270] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] The volume has not been displaced from its original location: [datastore1] volume-6a11f033-eb79-45c4-ad3c-793e965649b3/volume-6a11f033-eb79-45c4-ad3c-793e965649b3.vmdk. No consolidation needed. 
{{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2189.206554] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfiguring VM instance instance-0000006d to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2189.206871] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31294a95-df2f-4083-9ae9-b14025e0ac02 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.227896] env[63024]: DEBUG oslo_vmware.api [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2189.227896] env[63024]: value = "task-1951902" [ 2189.227896] env[63024]: _type = "Task" [ 2189.227896] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2189.236342] env[63024]: DEBUG oslo_vmware.api [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951902, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.306844] env[63024]: DEBUG nova.network.neutron [-] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2189.437563] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-78e20110-fda8-4443-a388-7cad866a6623 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.446448] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e7b9b6-6499-4eff-b2d8-81ec64619d78 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.473975] env[63024]: DEBUG nova.compute.manager [req-776f4d2b-615b-4271-b798-e41d73b4989f req-04dba362-6d5b-4fdf-b1b0-e71c3f0627d3 service nova] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Detach interface failed, port_id=94e31c56-bc4b-4f1b-9a4b-4311f832022a, reason: Instance 4c39a074-d380-46a3-b1cc-81d72034b743 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2189.503523] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "14bafeba-9f5b-4488-b29c-38939973deb9" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.503523] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "14bafeba-9f5b-4488-b29c-38939973deb9" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.503523] env[63024]: INFO nova.compute.manager [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Shelving [ 2189.517035] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "5919cc21-67b8-47d4-9909-bc972b42914d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.517151] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "5919cc21-67b8-47d4-9909-bc972b42914d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.526668] env[63024]: INFO nova.compute.manager [-] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Took 1.48 seconds to deallocate network for instance. 
[ 2189.629281] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.629565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.629779] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2189.629979] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2189.630170] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2189.632233] env[63024]: INFO nova.compute.manager [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Terminating instance [ 2189.738034] env[63024]: DEBUG oslo_vmware.api [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951902, 'name': ReconfigVM_Task, 'duration_secs': 0.195045} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2189.738320] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Reconfigured VM instance instance-0000006d to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2189.742851] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b4d12f7-afa0-4ecc-ba34-d7393d3e98ae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2189.757711] env[63024]: DEBUG oslo_vmware.api [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2189.757711] env[63024]: value = "task-1951903" [ 2189.757711] env[63024]: _type = "Task" [ 2189.757711] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2189.765309] env[63024]: DEBUG oslo_vmware.api [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951903, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2189.809717] env[63024]: INFO nova.compute.manager [-] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Took 1.77 seconds to deallocate network for instance. [ 2190.019152] env[63024]: DEBUG nova.compute.manager [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2190.031973] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2190.031973] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2190.032162] env[63024]: DEBUG nova.objects.instance [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lazy-loading 'resources' on Instance uuid 4c39a074-d380-46a3-b1cc-81d72034b743 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2190.136047] env[63024]: DEBUG nova.compute.manager [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2190.136352] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2190.137230] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95542ed1-be8d-4b69-87a6-97558e447902 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.145413] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2190.145666] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9144f76-2b95-47d4-8fdc-e21822c1e533 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.152455] env[63024]: DEBUG oslo_vmware.api [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2190.152455] env[63024]: value = "task-1951904" [ 2190.152455] env[63024]: _type = "Task" [ 2190.152455] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.160726] env[63024]: DEBUG oslo_vmware.api [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951904, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.267529] env[63024]: DEBUG oslo_vmware.api [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951903, 'name': ReconfigVM_Task, 'duration_secs': 0.159961} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.267908] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402259', 'volume_id': '6a11f033-eb79-45c4-ad3c-793e965649b3', 'name': 'volume-6a11f033-eb79-45c4-ad3c-793e965649b3', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'cb038d54-b785-4930-b8a5-b309c5f4b58d', 'attached_at': '2024-12-22T11:17:08.000000', 'detached_at': '', 'volume_id': '6a11f033-eb79-45c4-ad3c-793e965649b3', 'serial': '6a11f033-eb79-45c4-ad3c-793e965649b3'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2190.268259] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2190.269181] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f028924-1e3a-4fcd-be20-af4a0030d195 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.276050] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2190.276339] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-030a3436-2cdd-4711-9f35-9a973dd9bda2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.316814] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2190.356993] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 
tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2190.357317] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2190.357547] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleting the datastore file [datastore1] cb038d54-b785-4930-b8a5-b309c5f4b58d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2190.357857] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-77f99c37-4e98-46d5-8af7-f93dcd665e70 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.365672] env[63024]: DEBUG oslo_vmware.api [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2190.365672] env[63024]: value = "task-1951906" [ 2190.365672] env[63024]: _type = "Task" [ 2190.365672] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.374559] env[63024]: DEBUG oslo_vmware.api [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951906, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.513751] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2190.514053] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c2c3212d-ccfc-44f4-ab36-30aaa78ad07a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.520690] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2190.520690] env[63024]: value = "task-1951907" [ 2190.520690] env[63024]: _type = "Task" [ 2190.520690] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.533502] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951907, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.547363] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2190.628161] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbcc27e4-ff7a-4632-8d8b-d4e1130aacbb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.638773] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ab055f1-8706-46ff-a66b-9910817be780 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.677298] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-454391ff-4cab-45e9-9462-598e8571a76b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.686195] env[63024]: DEBUG oslo_vmware.api [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951904, 'name': PowerOffVM_Task, 'duration_secs': 0.228732} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.688403] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2190.688590] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2190.688891] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ea450396-9b4c-4995-9cb9-bc9b32229181 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.691352] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1dc73ac-f767-4591-a410-89679b3da9b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.706125] env[63024]: DEBUG nova.compute.provider_tree [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2190.815870] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2190.816094] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2190.816228] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleting the datastore file [datastore1] e8ad74ce-7862-4574-98e7-14bc54bd5d6c {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2190.816494] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4cc6c0fd-1b13-438c-a94f-37af251e25f0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2190.822766] env[63024]: DEBUG oslo_vmware.api [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for the task: (returnval){ [ 2190.822766] env[63024]: value = "task-1951909" [ 2190.822766] env[63024]: _type = "Task" [ 2190.822766] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2190.831211] env[63024]: DEBUG oslo_vmware.api [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951909, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2190.875365] env[63024]: DEBUG oslo_vmware.api [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146332} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2190.875605] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2190.875795] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2190.875976] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2190.876171] env[63024]: INFO nova.compute.manager [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Took 2.27 seconds to destroy the instance on the hypervisor. [ 2190.876418] env[63024]: DEBUG oslo.service.loopingcall [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2190.876624] env[63024]: DEBUG nova.compute.manager [-] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2190.876706] env[63024]: DEBUG nova.network.neutron [-] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2190.961702] env[63024]: DEBUG nova.compute.manager [req-699469fd-05df-4314-898e-46d8374ccb97 req-5da236f0-7b31-4095-beac-aee3bc3991d4 service nova] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Received event network-vif-deleted-a13f8ab3-d900-447f-8772-5be6b3d48296 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2191.030766] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951907, 'name': PowerOffVM_Task, 'duration_secs': 0.192165} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.031041] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2191.031813] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbed7f0-c6cb-4483-840e-db5ee2623939 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.049739] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66acc2b-fa3f-4393-8ea1-a9c9b99c2f52 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.226945] env[63024]: ERROR nova.scheduler.client.report [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [req-c4033867-4616-4277-bc5c-b824559473ab] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c4033867-4616-4277-bc5c-b824559473ab"}]} [ 2191.243796] env[63024]: DEBUG nova.scheduler.client.report [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 2191.259076] env[63024]: DEBUG nova.scheduler.client.report [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 2191.259366] env[63024]: DEBUG nova.compute.provider_tree [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2191.270870] env[63024]: DEBUG nova.scheduler.client.report [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 2191.291032] env[63024]: DEBUG nova.scheduler.client.report [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 2191.333046] env[63024]: DEBUG oslo_vmware.api [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Task: {'id': task-1951909, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140238} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2191.333311] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2191.333509] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2191.333677] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2191.333848] env[63024]: INFO nova.compute.manager [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Took 1.20 seconds to destroy the instance on the hypervisor. [ 2191.334099] env[63024]: DEBUG oslo.service.loopingcall [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2191.334294] env[63024]: DEBUG nova.compute.manager [-] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2191.334389] env[63024]: DEBUG nova.network.neutron [-] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2191.374556] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-123d80db-1a4b-47ca-afbb-65166d313953 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.382256] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f4f942-16db-46ec-8f49-cb1efb1ef8fb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.414538] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7081d8-5849-4f0c-b9dd-118444d9a3ae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.422476] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85162de-1b5f-48df-8c37-ed78005c8c6f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.437595] env[63024]: DEBUG nova.compute.provider_tree [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2191.563553] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2191.563553] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-2b41ff7e-5ba7-439a-af26-93c7c2e78a7d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2191.572562] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2191.572562] env[63024]: value = "task-1951910" [ 2191.572562] env[63024]: _type = "Task" [ 2191.572562] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2191.587264] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951910, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2191.813046] env[63024]: DEBUG nova.network.neutron [-] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2191.971085] env[63024]: DEBUG nova.scheduler.client.report [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 180 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2191.971434] env[63024]: DEBUG nova.compute.provider_tree [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 180 to 181 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2191.971657] env[63024]: DEBUG nova.compute.provider_tree [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2192.082907] env[63024]: DEBUG nova.network.neutron [-] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2192.084173] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951910, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2192.316103] env[63024]: INFO nova.compute.manager [-] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Took 1.44 seconds to deallocate network for instance. 
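The 409 "placement.concurrent_update" earlier in this run, followed by the inventory refresh and the generation bump from 180 to 181 above, is Placement's optimistic-concurrency protocol: every inventory write carries the resource provider generation, and a stale generation makes the server reject the write so the client re-reads and retries. A rough sketch of that round trip against the Placement REST API (endpoint, token, and microversion below are placeholders, not values from this deployment):

    import requests

    PLACEMENT = 'https://placement.example.test'   # placeholder endpoint
    HEADERS = {'X-Auth-Token': 'ADMIN_TOKEN',      # placeholder token
               'OpenStack-API-Version': 'placement 1.28'}
    RP = '89dfa68a-133e-436f-a9f1-86051f9fb96b'    # provider UUID from the log

    def put_inventories(inventories):
        url = f'{PLACEMENT}/resource_providers/{RP}/inventories'
        while True:
            # Read the provider's current generation first.
            current = requests.get(url, headers=HEADERS).json()
            body = {'resource_provider_generation':
                        current['resource_provider_generation'],
                    'inventories': inventories}
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code != 409:
                return resp.json()
            # 409 placement.concurrent_update: another writer bumped the
            # generation (exactly what happened above); refresh and retry.
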
[ 2192.477311] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.445s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.479697] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.163s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2192.479915] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2192.481618] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.934s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2192.483169] env[63024]: INFO nova.compute.claims [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2192.498595] env[63024]: INFO nova.scheduler.client.report [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted allocations for instance 4c39a074-d380-46a3-b1cc-81d72034b743 [ 2192.504788] env[63024]: INFO nova.scheduler.client.report [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleted allocations for instance fa326fe2-c00e-4379-954a-9b3275328abc [ 2192.584612] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951910, 'name': CreateSnapshot_Task, 'duration_secs': 0.565708} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2192.585076] env[63024]: INFO nova.compute.manager [-] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Took 1.25 seconds to deallocate network for instance. 
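Every task-shaped entry in this log (ReconfigVM_Task, PowerOffVM_Task, DeleteDatastoreFile_Task, CreateSnapshot_Task, CloneVM_Task) follows the same invoke-then-poll pattern through oslo.vmware. A condensed sketch of that pattern, not the actual vmops code, assuming placeholder credentials and a vm_ref obtained elsewhere (for example via the PropertyCollector/SearchIndex lookups interleaved above):

    from oslo_vmware import api

    session = api.VMwareAPISession('vc.example.test', 'user', 'secret',
                                   api_retry_count=10, task_poll_interval=0.5)

    def snapshot_vm(vm_ref, name):
        # invoke_api proxies the SOAP call the log records as
        # "Invoking VirtualMachine.CreateSnapshot_Task ...".
        task = session.invoke_api(session.vim, 'CreateSnapshot_Task', vm_ref,
                                  name=name, description='shelve snapshot',
                                  memory=False, quiesce=False)
        # wait_for_task polls the task and raises on error, producing the
        # "Task: {...} progress is N%" / "completed successfully" entries.
        return session.wait_for_task(task)
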
[ 2192.585390] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2192.587617] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f598a436-7d84-4a58-b5d4-ba568f418a50 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2192.862870] env[63024]: INFO nova.compute.manager [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Took 0.55 seconds to detach 1 volumes for instance. [ 2193.006136] env[63024]: DEBUG oslo_concurrency.lockutils [None req-0e723e70-ea6e-43da-b3d7-d8e0e80aae0b tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "4c39a074-d380-46a3-b1cc-81d72034b743" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.126s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.014766] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6320ae19-c370-4673-80a0-ca1c463bec39 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "fa326fe2-c00e-4379-954a-9b3275328abc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.941s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2193.096556] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.107580] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2193.108299] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1a7f9b70-d830-4084-8b26-651b50eb7c73 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.117743] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2193.117743] env[63024]: value = "task-1951911" [ 2193.117743] env[63024]: _type = "Task" [ 2193.117743] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2193.126376] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951911, 'name': CloneVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.218283] env[63024]: DEBUG nova.compute.manager [req-a5dc43f9-de5e-4752-bee3-4266dc75f32b req-d8a4fd00-f720-43b9-88c6-a47788a26462 service nova] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Received event network-vif-deleted-f5649c3a-8bab-4abb-a1a2-1d88b780eba2 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2193.218575] env[63024]: DEBUG nova.compute.manager [req-a5dc43f9-de5e-4752-bee3-4266dc75f32b req-d8a4fd00-f720-43b9-88c6-a47788a26462 service nova] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Received event network-vif-deleted-209c547a-fef6-4e81-9221-59b72099faa5 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2193.368806] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.574153] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830bcc34-addc-4f56-a7fb-96b0acd15bbd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.582216] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95fab077-0c93-42af-b2b1-87c6463615d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.615140] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b721f318-04a4-402b-b865-ff462363c4fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.625150] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8ac3a67-95c2-4e6d-84c2-2afb790b3077 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2193.632045] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951911, 'name': CloneVM_Task} progress is 94%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2193.641693] env[63024]: DEBUG nova.compute.provider_tree [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2193.799226] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "8901e234-22a9-4523-8658-411aa19e01e0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.799460] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "8901e234-22a9-4523-8658-411aa19e01e0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2193.888106] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "233d087b-923e-46d4-a47f-b024583ce0f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2193.888318] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "233d087b-923e-46d4-a47f-b024583ce0f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2194.128884] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951911, 'name': CloneVM_Task} progress is 95%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2194.144936] env[63024]: DEBUG nova.scheduler.client.report [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2194.302117] env[63024]: DEBUG nova.compute.manager [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2194.391026] env[63024]: DEBUG nova.compute.manager [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2194.629886] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951911, 'name': CloneVM_Task, 'duration_secs': 1.468614} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2194.629886] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Created linked-clone VM from snapshot [ 2194.630485] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac98d087-208a-4be7-a13b-28a77cf0f3df {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.639714] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Uploading image bca855b4-4ff0-428a-971b-15d087bf0880 {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2194.649560] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.168s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2194.650268] env[63024]: DEBUG nova.compute.manager [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2194.652701] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.556s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2194.652917] env[63024]: DEBUG nova.objects.instance [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lazy-loading 'resources' on Instance uuid e8ad74ce-7862-4574-98e7-14bc54bd5d6c {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2194.667566] env[63024]: DEBUG oslo_vmware.rw_handles [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2194.667566] env[63024]: value = "vm-402266" [ 2194.667566] env[63024]: _type = "VirtualMachine" [ 2194.667566] env[63024]: }. 
{{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2194.667881] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1a7e0fca-6268-4fe8-8fc0-54191004dc24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2194.675383] env[63024]: DEBUG oslo_vmware.rw_handles [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lease: (returnval){ [ 2194.675383] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524ac21a-9c66-6291-d06e-348ce0dd6da6" [ 2194.675383] env[63024]: _type = "HttpNfcLease" [ 2194.675383] env[63024]: } obtained for exporting VM: (result){ [ 2194.675383] env[63024]: value = "vm-402266" [ 2194.675383] env[63024]: _type = "VirtualMachine" [ 2194.675383] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2194.675699] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the lease: (returnval){ [ 2194.675699] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524ac21a-9c66-6291-d06e-348ce0dd6da6" [ 2194.675699] env[63024]: _type = "HttpNfcLease" [ 2194.675699] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2194.682720] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2194.682720] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524ac21a-9c66-6291-d06e-348ce0dd6da6" [ 2194.682720] env[63024]: _type = "HttpNfcLease" [ 2194.682720] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2194.824127] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2194.909364] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2195.156165] env[63024]: DEBUG nova.compute.utils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2195.160781] env[63024]: DEBUG nova.compute.manager [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2195.160946] env[63024]: DEBUG nova.network.neutron [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2195.183675] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2195.183675] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524ac21a-9c66-6291-d06e-348ce0dd6da6" [ 2195.183675] env[63024]: _type = "HttpNfcLease" [ 2195.183675] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2195.184017] env[63024]: DEBUG oslo_vmware.rw_handles [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2195.184017] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524ac21a-9c66-6291-d06e-348ce0dd6da6" [ 2195.184017] env[63024]: _type = "HttpNfcLease" [ 2195.184017] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2195.184668] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ff3dadd-4288-49d2-9b53-8cdc0496128b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.194356] env[63024]: DEBUG oslo_vmware.rw_handles [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f5462c-09c1-8edd-5437-ea64ab3bb9f9/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2195.194532] env[63024]: DEBUG oslo_vmware.rw_handles [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f5462c-09c1-8edd-5437-ea64ab3bb9f9/disk-0.vmdk for reading. 
{{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2195.253878] env[63024]: DEBUG nova.policy [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f215c99539cd43039ffdb0c6cf70beaf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d0715f0ccbd49ec8af8e3049d970994', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2195.286311] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bd4d7e1c-e223-4e98-a515-c5f21889dc1c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.300082] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d87412b2-ba61-4a16-a42e-db2e4953a82b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.313466] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1602f0a-5603-49b2-8918-27b4e898d17e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.343173] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3208c2-4f18-4887-b653-b0ba1397c314 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.350607] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7debb3-e287-4977-bada-90ccca6d201e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2195.366048] env[63024]: DEBUG nova.compute.provider_tree [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2195.567943] env[63024]: DEBUG nova.network.neutron [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Successfully created port: f3bc6bf4-4559-4a56-b6cb-c71a44d6651b {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2195.661722] env[63024]: DEBUG nova.compute.manager [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2195.868770] env[63024]: DEBUG nova.scheduler.client.report [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2196.374261] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.721s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.377251] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.009s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.377528] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.379404] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.555s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2196.380976] env[63024]: INFO nova.compute.claims [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2196.396851] env[63024]: INFO nova.scheduler.client.report [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Deleted allocations for instance e8ad74ce-7862-4574-98e7-14bc54bd5d6c [ 2196.398665] env[63024]: INFO nova.scheduler.client.report [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleted allocations for instance cb038d54-b785-4930-b8a5-b309c5f4b58d [ 2196.672452] env[63024]: DEBUG nova.compute.manager [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e 
tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2196.696860] env[63024]: DEBUG nova.virt.hardware [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2196.697293] env[63024]: DEBUG nova.virt.hardware [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2196.697775] env[63024]: DEBUG nova.virt.hardware [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2196.698042] env[63024]: DEBUG nova.virt.hardware [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2196.698212] env[63024]: DEBUG nova.virt.hardware [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2196.698403] env[63024]: DEBUG nova.virt.hardware [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2196.698653] env[63024]: DEBUG nova.virt.hardware [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2196.698854] env[63024]: DEBUG nova.virt.hardware [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 
tempest-AttachVolumeNegativeTest-1742414548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2196.699077] env[63024]: DEBUG nova.virt.hardware [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2196.699288] env[63024]: DEBUG nova.virt.hardware [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2196.699505] env[63024]: DEBUG nova.virt.hardware [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2196.700420] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-647a9178-f978-4e25-a474-315c10e2488e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.708992] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7411b93d-3ece-4683-aeb9-383e5962987b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2196.909085] env[63024]: DEBUG oslo_concurrency.lockutils [None req-95cfda9e-d675-4cef-b891-7a286c848388 tempest-ServerActionsTestOtherA-322542459 tempest-ServerActionsTestOtherA-322542459-project-member] Lock "e8ad74ce-7862-4574-98e7-14bc54bd5d6c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.279s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2196.910836] env[63024]: DEBUG oslo_concurrency.lockutils [None req-2dfa6c29-b3c3-4725-abfd-b07f5eae4298 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "cb038d54-b785-4930-b8a5-b309c5f4b58d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.812s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2197.069922] env[63024]: DEBUG nova.compute.manager [req-16e2a019-b85c-4a60-8a6c-462b6b1a014d req-d276ede4-f03f-41b5-9572-51d4330fa593 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Received event network-vif-plugged-f3bc6bf4-4559-4a56-b6cb-c71a44d6651b {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2197.070177] env[63024]: DEBUG oslo_concurrency.lockutils [req-16e2a019-b85c-4a60-8a6c-462b6b1a014d req-d276ede4-f03f-41b5-9572-51d4330fa593 service nova] Acquiring lock "5919cc21-67b8-47d4-9909-bc972b42914d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2197.070503] env[63024]: DEBUG oslo_concurrency.lockutils 
[req-16e2a019-b85c-4a60-8a6c-462b6b1a014d req-d276ede4-f03f-41b5-9572-51d4330fa593 service nova] Lock "5919cc21-67b8-47d4-9909-bc972b42914d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2197.070839] env[63024]: DEBUG oslo_concurrency.lockutils [req-16e2a019-b85c-4a60-8a6c-462b6b1a014d req-d276ede4-f03f-41b5-9572-51d4330fa593 service nova] Lock "5919cc21-67b8-47d4-9909-bc972b42914d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2197.071206] env[63024]: DEBUG nova.compute.manager [req-16e2a019-b85c-4a60-8a6c-462b6b1a014d req-d276ede4-f03f-41b5-9572-51d4330fa593 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] No waiting events found dispatching network-vif-plugged-f3bc6bf4-4559-4a56-b6cb-c71a44d6651b {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2197.071206] env[63024]: WARNING nova.compute.manager [req-16e2a019-b85c-4a60-8a6c-462b6b1a014d req-d276ede4-f03f-41b5-9572-51d4330fa593 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Received unexpected event network-vif-plugged-f3bc6bf4-4559-4a56-b6cb-c71a44d6651b for instance with vm_state building and task_state spawning. [ 2197.186284] env[63024]: DEBUG nova.network.neutron [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Successfully updated port: f3bc6bf4-4559-4a56-b6cb-c71a44d6651b {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2197.473664] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a7a2622-49ab-4739-a3de-3a55074593d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.486584] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aaec63a-598f-415e-8e77-2ee5b1825048 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.539347] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbb2dfd9-3880-4ab4-93b9-04fa8bee64d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.549379] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b6fce9-0406-4d0c-92bf-9fc8402a493b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2197.565462] env[63024]: DEBUG nova.compute.provider_tree [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2197.692280] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] 
Acquiring lock "refresh_cache-5919cc21-67b8-47d4-9909-bc972b42914d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2197.692531] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired lock "refresh_cache-5919cc21-67b8-47d4-9909-bc972b42914d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2197.692583] env[63024]: DEBUG nova.network.neutron [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2197.805650] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "7d78b891-34c0-46dd-8b0d-ce80517232e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2197.805946] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2198.069289] env[63024]: DEBUG nova.scheduler.client.report [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2198.225322] env[63024]: DEBUG nova.network.neutron [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2198.309361] env[63024]: DEBUG nova.compute.manager [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2198.369031] env[63024]: DEBUG nova.network.neutron [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Updating instance_info_cache with network_info: [{"id": "f3bc6bf4-4559-4a56-b6cb-c71a44d6651b", "address": "fa:16:3e:a5:49:82", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3bc6bf4-45", "ovs_interfaceid": "f3bc6bf4-4559-4a56-b6cb-c71a44d6651b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2198.574602] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.195s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2198.575340] env[63024]: DEBUG nova.compute.manager [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2198.580038] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.671s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2198.582028] env[63024]: INFO nova.compute.claims [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2198.832096] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2198.871458] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Releasing lock "refresh_cache-5919cc21-67b8-47d4-9909-bc972b42914d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2198.871818] env[63024]: DEBUG nova.compute.manager [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Instance network_info: |[{"id": "f3bc6bf4-4559-4a56-b6cb-c71a44d6651b", "address": "fa:16:3e:a5:49:82", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3bc6bf4-45", "ovs_interfaceid": "f3bc6bf4-4559-4a56-b6cb-c71a44d6651b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2198.872338] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:49:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'98011432-48cc-4ffd-a5a8-b96d2ea4424a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f3bc6bf4-4559-4a56-b6cb-c71a44d6651b', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2198.881889] env[63024]: DEBUG oslo.service.loopingcall [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2198.882158] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2198.882387] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89add216-870e-4c72-9856-3fdf9976e23f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2198.904275] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2198.904275] env[63024]: value = "task-1951913" [ 2198.904275] env[63024]: _type = "Task" [ 2198.904275] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2198.912345] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951913, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.086047] env[63024]: DEBUG nova.compute.utils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2199.090522] env[63024]: DEBUG nova.compute.manager [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2199.090668] env[63024]: DEBUG nova.network.neutron [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2199.101632] env[63024]: DEBUG nova.compute.manager [req-2005c56a-670e-473a-b7d7-2a64322e7485 req-0dd53b14-5bfd-4ef4-baad-186d972aed22 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Received event network-changed-f3bc6bf4-4559-4a56-b6cb-c71a44d6651b {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2199.101940] env[63024]: DEBUG nova.compute.manager [req-2005c56a-670e-473a-b7d7-2a64322e7485 req-0dd53b14-5bfd-4ef4-baad-186d972aed22 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Refreshing instance network info cache due to event network-changed-f3bc6bf4-4559-4a56-b6cb-c71a44d6651b. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2199.102673] env[63024]: DEBUG oslo_concurrency.lockutils [req-2005c56a-670e-473a-b7d7-2a64322e7485 req-0dd53b14-5bfd-4ef4-baad-186d972aed22 service nova] Acquiring lock "refresh_cache-5919cc21-67b8-47d4-9909-bc972b42914d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2199.103337] env[63024]: DEBUG oslo_concurrency.lockutils [req-2005c56a-670e-473a-b7d7-2a64322e7485 req-0dd53b14-5bfd-4ef4-baad-186d972aed22 service nova] Acquired lock "refresh_cache-5919cc21-67b8-47d4-9909-bc972b42914d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2199.103337] env[63024]: DEBUG nova.network.neutron [req-2005c56a-670e-473a-b7d7-2a64322e7485 req-0dd53b14-5bfd-4ef4-baad-186d972aed22 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Refreshing network info cache for port f3bc6bf4-4559-4a56-b6cb-c71a44d6651b {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2199.138013] env[63024]: DEBUG nova.policy [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '28fab1e92c1d4491986100983f6b4ab1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6072e8931d9540ad8fe4a2b4b1ec782d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2199.415110] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951913, 'name': CreateVM_Task, 'duration_secs': 0.361556} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.416038] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2199.416038] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2199.416235] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2199.416521] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2199.416802] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05d31d90-7cfb-4a66-8b58-45b4d8e89791 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.421999] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2199.421999] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b2cd09-eb22-c2b0-840e-0c90253ccbb3" [ 2199.421999] env[63024]: _type = "Task" [ 2199.421999] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.426409] env[63024]: DEBUG nova.network.neutron [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Successfully created port: f00f50c5-72cc-47ac-b97d-c507d47aa150 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2199.434098] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b2cd09-eb22-c2b0-840e-0c90253ccbb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2199.591644] env[63024]: DEBUG nova.compute.manager [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2199.696845] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b7b047-5e2d-4a7a-aac3-780b044dc22f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.706716] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b1aca1-40cc-4e62-b7ee-54a7f927eb49 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.745015] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f5d6fc-f522-4c7a-9b58-e06fe3fd5117 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.754777] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39c77e6d-2f8f-4bc2-8a80-fb578e63ab14 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.773295] env[63024]: DEBUG nova.compute.provider_tree [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2199.940030] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b2cd09-eb22-c2b0-840e-0c90253ccbb3, 'name': SearchDatastore_Task, 'duration_secs': 0.01429} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2199.940030] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2199.940030] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2199.940030] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2199.940030] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2199.940030] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2199.940030] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-83dc6a89-37f4-4897-869d-a48c7349103e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.949154] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2199.949746] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2199.950935] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e4b1b21-dfc9-480e-8aae-7e0d2ab59b61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2199.960017] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2199.960017] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52994972-1b99-4095-b7bd-5234bc18aab9" [ 2199.960017] env[63024]: _type = "Task" [ 2199.960017] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2199.966363] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52994972-1b99-4095-b7bd-5234bc18aab9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.054561] env[63024]: DEBUG nova.network.neutron [req-2005c56a-670e-473a-b7d7-2a64322e7485 req-0dd53b14-5bfd-4ef4-baad-186d972aed22 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Updated VIF entry in instance network info cache for port f3bc6bf4-4559-4a56-b6cb-c71a44d6651b. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2200.055288] env[63024]: DEBUG nova.network.neutron [req-2005c56a-670e-473a-b7d7-2a64322e7485 req-0dd53b14-5bfd-4ef4-baad-186d972aed22 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Updating instance_info_cache with network_info: [{"id": "f3bc6bf4-4559-4a56-b6cb-c71a44d6651b", "address": "fa:16:3e:a5:49:82", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3bc6bf4-45", "ovs_interfaceid": "f3bc6bf4-4559-4a56-b6cb-c71a44d6651b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2200.276034] env[63024]: DEBUG nova.scheduler.client.report [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory 
data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2200.474497] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52994972-1b99-4095-b7bd-5234bc18aab9, 'name': SearchDatastore_Task, 'duration_secs': 0.010148} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2200.475500] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb08b888-9f63-4afa-8612-23b19bfd0bee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.485644] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2200.485644] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524042fb-e43c-11b0-9206-92f2e5c73a8c" [ 2200.485644] env[63024]: _type = "Task" [ 2200.485644] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2200.494168] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524042fb-e43c-11b0-9206-92f2e5c73a8c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2200.558726] env[63024]: DEBUG oslo_concurrency.lockutils [req-2005c56a-670e-473a-b7d7-2a64322e7485 req-0dd53b14-5bfd-4ef4-baad-186d972aed22 service nova] Releasing lock "refresh_cache-5919cc21-67b8-47d4-9909-bc972b42914d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2200.602542] env[63024]: DEBUG nova.compute.manager [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2200.631147] env[63024]: DEBUG nova.virt.hardware [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2200.631147] env[63024]: DEBUG nova.virt.hardware [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2200.631147] env[63024]: DEBUG nova.virt.hardware [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2200.631316] env[63024]: DEBUG nova.virt.hardware [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2200.631618] env[63024]: DEBUG nova.virt.hardware [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2200.631618] env[63024]: DEBUG nova.virt.hardware [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2200.632766] env[63024]: DEBUG nova.virt.hardware [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2200.632766] env[63024]: DEBUG nova.virt.hardware [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2200.632766] env[63024]: DEBUG nova.virt.hardware [None 
req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2200.632766] env[63024]: DEBUG nova.virt.hardware [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2200.632766] env[63024]: DEBUG nova.virt.hardware [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2200.633738] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6051b0bc-c59b-43b8-8003-383b8b2435a6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.642967] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d333aa1-730d-4948-bc44-4329bee163e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2200.781779] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.202s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2200.782395] env[63024]: DEBUG nova.compute.manager [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2200.785866] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.954s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2200.787484] env[63024]: INFO nova.compute.claims [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2200.999482] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]524042fb-e43c-11b0-9206-92f2e5c73a8c, 'name': SearchDatastore_Task, 'duration_secs': 0.011209} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2201.000999] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2201.000999] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 5919cc21-67b8-47d4-9909-bc972b42914d/5919cc21-67b8-47d4-9909-bc972b42914d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2201.000999] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d9478252-89d4-4563-b6bb-5fc16f6a18ef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.008456] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2201.008456] env[63024]: value = "task-1951914" [ 2201.008456] env[63024]: _type = "Task" [ 2201.008456] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2201.019051] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951914, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.292430] env[63024]: DEBUG nova.compute.utils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2201.297239] env[63024]: DEBUG nova.compute.manager [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2201.297483] env[63024]: DEBUG nova.network.neutron [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2201.421984] env[63024]: DEBUG nova.policy [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7c32db2d81e40c492c1362d8356a03c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93098ad83ae144bf90a12c97ec863c06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2201.424743] env[63024]: DEBUG nova.compute.manager [req-6ea763b2-a862-486e-81a4-cd4312e94049 req-986ddeed-806d-41ba-9fd3-b3ebfc20f834 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Received event network-vif-plugged-f00f50c5-72cc-47ac-b97d-c507d47aa150 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2201.424884] env[63024]: DEBUG oslo_concurrency.lockutils [req-6ea763b2-a862-486e-81a4-cd4312e94049 req-986ddeed-806d-41ba-9fd3-b3ebfc20f834 service nova] Acquiring lock "8901e234-22a9-4523-8658-411aa19e01e0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2201.425119] env[63024]: DEBUG oslo_concurrency.lockutils [req-6ea763b2-a862-486e-81a4-cd4312e94049 req-986ddeed-806d-41ba-9fd3-b3ebfc20f834 service nova] Lock "8901e234-22a9-4523-8658-411aa19e01e0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2201.425283] env[63024]: DEBUG oslo_concurrency.lockutils [req-6ea763b2-a862-486e-81a4-cd4312e94049 req-986ddeed-806d-41ba-9fd3-b3ebfc20f834 service nova] Lock "8901e234-22a9-4523-8658-411aa19e01e0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2201.425461] env[63024]: DEBUG nova.compute.manager [req-6ea763b2-a862-486e-81a4-cd4312e94049 req-986ddeed-806d-41ba-9fd3-b3ebfc20f834 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] No waiting events found dispatching network-vif-plugged-f00f50c5-72cc-47ac-b97d-c507d47aa150 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2201.425732] env[63024]: WARNING nova.compute.manager [req-6ea763b2-a862-486e-81a4-cd4312e94049 req-986ddeed-806d-41ba-9fd3-b3ebfc20f834 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Received unexpected event network-vif-plugged-f00f50c5-72cc-47ac-b97d-c507d47aa150 for instance with vm_state building and task_state spawning. 
[ 2201.521941] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951914, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2201.751525] env[63024]: DEBUG nova.network.neutron [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Successfully updated port: f00f50c5-72cc-47ac-b97d-c507d47aa150 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2201.798228] env[63024]: DEBUG nova.compute.manager [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2201.918645] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff791c1-3c3c-437b-8ef1-23a575b9f1f9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.927324] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8135d7f0-9918-48f2-8663-ad396ca033f8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.960537] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-159b460c-490d-4bfc-9092-9499ab948b1b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.969245] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-465ffff2-903e-4ce4-a156-dff3c92bf9cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2201.985495] env[63024]: DEBUG nova.compute.provider_tree [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2201.988097] env[63024]: DEBUG nova.network.neutron [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Successfully created port: 3cb168eb-0496-4b9c-9924-dddfc19369f8 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2202.019956] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951914, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.65171} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2202.020209] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 5919cc21-67b8-47d4-9909-bc972b42914d/5919cc21-67b8-47d4-9909-bc972b42914d.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2202.020275] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2202.020505] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c24cba67-5297-4f8b-a886-4009b0037dce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.027844] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2202.027844] env[63024]: value = "task-1951915" [ 2202.027844] env[63024]: _type = "Task" [ 2202.027844] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.035253] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951915, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.256126] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2202.256126] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2202.256317] env[63024]: DEBUG nova.network.neutron [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2202.494040] env[63024]: DEBUG nova.scheduler.client.report [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2202.541086] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951915, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10179} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2202.541263] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2202.542069] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4be00e33-be53-4121-9745-3218dbdba00c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.565730] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 5919cc21-67b8-47d4-9909-bc972b42914d/5919cc21-67b8-47d4-9909-bc972b42914d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2202.566125] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c739a507-6230-4c16-8e8b-bd0773604b2b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.589356] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2202.589356] env[63024]: value = "task-1951916" [ 2202.589356] env[63024]: _type = "Task" [ 2202.589356] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2202.598294] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951916, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2202.677632] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.677896] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2202.792222] env[63024]: DEBUG nova.network.neutron [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2202.815220] env[63024]: DEBUG nova.compute.manager [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2202.843467] env[63024]: DEBUG nova.virt.hardware [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2202.843719] env[63024]: DEBUG nova.virt.hardware [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2202.843874] env[63024]: DEBUG nova.virt.hardware [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2202.844235] env[63024]: DEBUG nova.virt.hardware [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2202.844235] env[63024]: DEBUG nova.virt.hardware [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2202.844406] env[63024]: DEBUG nova.virt.hardware [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2202.844562] env[63024]: DEBUG nova.virt.hardware [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2202.844725] env[63024]: DEBUG nova.virt.hardware 
[None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2202.845409] env[63024]: DEBUG nova.virt.hardware [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2202.845409] env[63024]: DEBUG nova.virt.hardware [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2202.845409] env[63024]: DEBUG nova.virt.hardware [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2202.846261] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c182bde-2292-46de-89af-9dc3ac287b3e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.859081] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-895bc22f-bf3c-4789-a84c-8acd01eb8e99 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2202.969560] env[63024]: DEBUG nova.network.neutron [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Updating instance_info_cache with network_info: [{"id": "f00f50c5-72cc-47ac-b97d-c507d47aa150", "address": "fa:16:3e:39:d0:25", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00f50c5-72", "ovs_interfaceid": "f00f50c5-72cc-47ac-b97d-c507d47aa150", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2202.999454] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 
tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.213s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2203.000025] env[63024]: DEBUG nova.compute.manager [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2203.099880] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951916, 'name': ReconfigVM_Task, 'duration_secs': 0.297139} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.100469] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 5919cc21-67b8-47d4-9909-bc972b42914d/5919cc21-67b8-47d4-9909-bc972b42914d.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2203.100802] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e8a31ea6-f3d8-4f34-b1ba-c0b4f0e1bfdb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.107419] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2203.107419] env[63024]: value = "task-1951917" [ 2203.107419] env[63024]: _type = "Task" [ 2203.107419] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.115734] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951917, 'name': Rename_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.183327] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2203.183468] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 2203.183584] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 2203.234287] env[63024]: DEBUG oslo_vmware.rw_handles [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f5462c-09c1-8edd-5437-ea64ab3bb9f9/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2203.235371] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a0991af-a6d1-4287-a621-57ff3cfaaa6b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.242697] env[63024]: DEBUG oslo_vmware.rw_handles [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f5462c-09c1-8edd-5437-ea64ab3bb9f9/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2203.242893] env[63024]: ERROR oslo_vmware.rw_handles [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f5462c-09c1-8edd-5437-ea64ab3bb9f9/disk-0.vmdk due to incomplete transfer. [ 2203.243125] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-25b08810-4c39-4178-bcf3-82e868d3f938 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.250584] env[63024]: DEBUG oslo_vmware.rw_handles [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52f5462c-09c1-8edd-5437-ea64ab3bb9f9/disk-0.vmdk. 
{{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2203.250784] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Uploaded image bca855b4-4ff0-428a-971b-15d087bf0880 to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2203.253079] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2203.253316] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7eb28ada-1e42-4340-bc37-1aa531f56ffd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.259416] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2203.259416] env[63024]: value = "task-1951918" [ 2203.259416] env[63024]: _type = "Task" [ 2203.259416] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.267649] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951918, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.463672] env[63024]: DEBUG nova.compute.manager [req-144573ad-1c19-42f8-baf9-1f252cefae21 req-233dd88b-695c-4c30-bfb1-a2d88ca2d898 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Received event network-changed-f00f50c5-72cc-47ac-b97d-c507d47aa150 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2203.464061] env[63024]: DEBUG nova.compute.manager [req-144573ad-1c19-42f8-baf9-1f252cefae21 req-233dd88b-695c-4c30-bfb1-a2d88ca2d898 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Refreshing instance network info cache due to event network-changed-f00f50c5-72cc-47ac-b97d-c507d47aa150. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2203.464468] env[63024]: DEBUG oslo_concurrency.lockutils [req-144573ad-1c19-42f8-baf9-1f252cefae21 req-233dd88b-695c-4c30-bfb1-a2d88ca2d898 service nova] Acquiring lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2203.473222] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2203.473771] env[63024]: DEBUG nova.compute.manager [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Instance network_info: |[{"id": "f00f50c5-72cc-47ac-b97d-c507d47aa150", "address": "fa:16:3e:39:d0:25", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00f50c5-72", "ovs_interfaceid": "f00f50c5-72cc-47ac-b97d-c507d47aa150", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2203.474275] env[63024]: DEBUG oslo_concurrency.lockutils [req-144573ad-1c19-42f8-baf9-1f252cefae21 req-233dd88b-695c-4c30-bfb1-a2d88ca2d898 service nova] Acquired lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2203.474468] env[63024]: DEBUG nova.network.neutron [req-144573ad-1c19-42f8-baf9-1f252cefae21 req-233dd88b-695c-4c30-bfb1-a2d88ca2d898 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Refreshing network info cache for port f00f50c5-72cc-47ac-b97d-c507d47aa150 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2203.475676] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:d0:25', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f3ccbdbb-8b49-4a26-913f-2a448b72280f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f00f50c5-72cc-47ac-b97d-c507d47aa150', 
'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2203.484941] env[63024]: DEBUG oslo.service.loopingcall [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2203.486144] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2203.486510] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fec70dec-82fa-4835-a9bd-bd043b596c8a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.506777] env[63024]: DEBUG nova.compute.utils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2203.512021] env[63024]: DEBUG nova.compute.manager [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2203.512021] env[63024]: DEBUG nova.network.neutron [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2203.513209] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2203.513209] env[63024]: value = "task-1951919" [ 2203.513209] env[63024]: _type = "Task" [ 2203.513209] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.524834] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951919, 'name': CreateVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.602719] env[63024]: DEBUG nova.policy [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27151e89c8ee4ddd9285bff3795a82b2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e53c02ad56640dc8cbc8839669b67bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2203.619647] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951917, 'name': Rename_Task, 'duration_secs': 0.136332} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.620057] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2203.620394] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-024a2fea-6197-4338-8946-1ec1745e2669 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.628787] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2203.628787] env[63024]: value = "task-1951920" [ 2203.628787] env[63024]: _type = "Task" [ 2203.628787] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.641385] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951920, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.689185] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Skipping network cache update for instance because it is Building. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10271}} [ 2203.689383] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Skipping network cache update for instance because it is Building. 
{{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10271}} [ 2203.689561] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Skipping network cache update for instance because it is Building. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10271}} [ 2203.689711] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Skipping network cache update for instance because it is Building. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10271}} [ 2203.689938] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2203.690093] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquired lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2203.690251] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Forcefully refreshing network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2203.690477] env[63024]: DEBUG nova.objects.instance [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lazy-loading 'info_cache' on Instance uuid 14bafeba-9f5b-4488-b29c-38939973deb9 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2203.772312] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951918, 'name': Destroy_Task, 'duration_secs': 0.420763} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2203.772688] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Destroyed the VM [ 2203.773583] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2203.774048] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-90028053-2765-4042-960e-72daf946c6d8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2203.785844] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2203.785844] env[63024]: value = "task-1951921" [ 2203.785844] env[63024]: _type = "Task" [ 2203.785844] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2203.795279] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951921, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2203.887818] env[63024]: DEBUG nova.network.neutron [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Successfully updated port: 3cb168eb-0496-4b9c-9924-dddfc19369f8 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2204.011519] env[63024]: DEBUG nova.compute.manager [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2204.028553] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951919, 'name': CreateVM_Task, 'duration_secs': 0.365406} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.028895] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2204.029831] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2204.030181] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2204.030558] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2204.030833] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-376ca95f-486a-4f9c-96b5-96ff17983e0d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.038217] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2204.038217] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52863047-1c98-ad0d-8437-c9c238b54a28" [ 2204.038217] env[63024]: _type = "Task" [ 2204.038217] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.050294] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52863047-1c98-ad0d-8437-c9c238b54a28, 'name': SearchDatastore_Task, 'duration_secs': 0.009452} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.050604] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2204.050863] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2204.051665] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2204.051665] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2204.052881] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2204.052881] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12fc9839-49de-4110-811c-068940131433 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.063625] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2204.063841] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2204.067929] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-894324c5-56e1-4ef1-8a40-6536cc511f24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.074603] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2204.074603] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528fec2c-052b-7539-cf26-b5e72ed432f6" [ 2204.074603] env[63024]: _type = "Task" [ 2204.074603] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.084278] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528fec2c-052b-7539-cf26-b5e72ed432f6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.140150] env[63024]: DEBUG oslo_vmware.api [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951920, 'name': PowerOnVM_Task, 'duration_secs': 0.501738} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.140886] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2204.141233] env[63024]: INFO nova.compute.manager [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Took 7.47 seconds to spawn the instance on the hypervisor. 
[ 2204.141547] env[63024]: DEBUG nova.compute.manager [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2204.142495] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517e8283-6d11-4075-b46d-2e2f5139b2e6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.159886] env[63024]: DEBUG nova.network.neutron [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Successfully created port: 4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2204.295953] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951921, 'name': RemoveSnapshot_Task, 'duration_secs': 0.435557} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.296259] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2204.296534] env[63024]: DEBUG nova.compute.manager [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2204.297456] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6809106-f7cd-4863-8fa6-9c95c7b0c414 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.391931] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "refresh_cache-233d087b-923e-46d4-a47f-b024583ce0f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2204.392160] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "refresh_cache-233d087b-923e-46d4-a47f-b024583ce0f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2204.392319] env[63024]: DEBUG nova.network.neutron [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 
2204.591549] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]528fec2c-052b-7539-cf26-b5e72ed432f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009373} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.592447] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbda1360-7a64-4be7-8caf-6548f918a875 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.601343] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2204.601343] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52150897-4711-6df5-02fa-97b5c497791a" [ 2204.601343] env[63024]: _type = "Task" [ 2204.601343] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.613993] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52150897-4711-6df5-02fa-97b5c497791a, 'name': SearchDatastore_Task, 'duration_secs': 0.010717} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2204.613993] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2204.613993] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 8901e234-22a9-4523-8658-411aa19e01e0/8901e234-22a9-4523-8658-411aa19e01e0.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2204.613993] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-66c6f61c-75d6-40a6-a6d0-5d9f442e2c64 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2204.619612] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2204.619612] env[63024]: value = "task-1951922" [ 2204.619612] env[63024]: _type = "Task" [ 2204.619612] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2204.627478] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951922, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2204.668359] env[63024]: INFO nova.compute.manager [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Took 14.14 seconds to build instance. [ 2204.812427] env[63024]: DEBUG nova.network.neutron [req-144573ad-1c19-42f8-baf9-1f252cefae21 req-233dd88b-695c-4c30-bfb1-a2d88ca2d898 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Updated VIF entry in instance network info cache for port f00f50c5-72cc-47ac-b97d-c507d47aa150. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2204.812780] env[63024]: DEBUG nova.network.neutron [req-144573ad-1c19-42f8-baf9-1f252cefae21 req-233dd88b-695c-4c30-bfb1-a2d88ca2d898 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Updating instance_info_cache with network_info: [{"id": "f00f50c5-72cc-47ac-b97d-c507d47aa150", "address": "fa:16:3e:39:d0:25", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00f50c5-72", "ovs_interfaceid": "f00f50c5-72cc-47ac-b97d-c507d47aa150", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2204.814083] env[63024]: INFO nova.compute.manager [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Shelve offloading [ 2204.992818] env[63024]: DEBUG nova.network.neutron [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2205.038446] env[63024]: DEBUG nova.compute.manager [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2205.088240] env[63024]: DEBUG nova.virt.hardware [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2205.088540] env[63024]: DEBUG nova.virt.hardware [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2205.088720] env[63024]: DEBUG nova.virt.hardware [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2205.088910] env[63024]: DEBUG nova.virt.hardware [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2205.089074] env[63024]: DEBUG nova.virt.hardware [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2205.089224] env[63024]: DEBUG nova.virt.hardware [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2205.089437] env[63024]: DEBUG nova.virt.hardware [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 2205.089648] env[63024]: DEBUG nova.virt.hardware [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2205.089841] env[63024]: DEBUG nova.virt.hardware [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2205.090160] env[63024]: DEBUG nova.virt.hardware [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2205.090410] env[63024]: DEBUG nova.virt.hardware [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2205.091695] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da9eb62-cb3f-4139-90f5-3042c48151df {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.102010] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7a92326-ebf8-4133-9617-b87bca429583 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.131765] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951922, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.502859} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.132296] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 8901e234-22a9-4523-8658-411aa19e01e0/8901e234-22a9-4523-8658-411aa19e01e0.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2205.132436] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2205.132904] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-72aef977-f420-4876-8c00-0a1ab56a731d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.140679] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2205.140679] env[63024]: value = "task-1951923" [ 2205.140679] env[63024]: _type = "Task" [ 2205.140679] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.151342] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951923, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.170424] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cefed067-84ad-4af9-bbe0-7230a866bc8e tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "5919cc21-67b8-47d4-9909-bc972b42914d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.653s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2205.278433] env[63024]: DEBUG nova.network.neutron [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Updating instance_info_cache with network_info: [{"id": "3cb168eb-0496-4b9c-9924-dddfc19369f8", "address": "fa:16:3e:73:33:c0", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cb168eb-04", "ovs_interfaceid": "3cb168eb-0496-4b9c-9924-dddfc19369f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2205.316759] env[63024]: DEBUG oslo_concurrency.lockutils [req-144573ad-1c19-42f8-baf9-1f252cefae21 req-233dd88b-695c-4c30-bfb1-a2d88ca2d898 service nova] Releasing lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2205.318989] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2205.318989] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86060383-2c51-4296-b7ae-6057ffe23f79 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.327444] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2205.327444] env[63024]: value = "task-1951924" [ 2205.327444] env[63024]: _type = "Task" [ 2205.327444] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.337744] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2205.337960] env[63024]: DEBUG nova.compute.manager [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2205.339556] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1f1aaa9-1601-4f4b-982d-8af7b59c6e04 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.344422] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2205.502035] env[63024]: DEBUG nova.compute.manager [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Received event network-vif-plugged-3cb168eb-0496-4b9c-9924-dddfc19369f8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2205.502582] env[63024]: DEBUG oslo_concurrency.lockutils [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] Acquiring lock "233d087b-923e-46d4-a47f-b024583ce0f8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2205.502944] env[63024]: DEBUG oslo_concurrency.lockutils [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] Lock "233d087b-923e-46d4-a47f-b024583ce0f8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2205.503875] env[63024]: DEBUG oslo_concurrency.lockutils [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] Lock "233d087b-923e-46d4-a47f-b024583ce0f8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2205.503875] env[63024]: DEBUG nova.compute.manager [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] No waiting events found dispatching network-vif-plugged-3cb168eb-0496-4b9c-9924-dddfc19369f8 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2205.504099] env[63024]: WARNING nova.compute.manager [req-853c179a-812d-4ee6-b981-407db468c1e3 
req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Received unexpected event network-vif-plugged-3cb168eb-0496-4b9c-9924-dddfc19369f8 for instance with vm_state building and task_state spawning. [ 2205.504401] env[63024]: DEBUG nova.compute.manager [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Received event network-changed-3cb168eb-0496-4b9c-9924-dddfc19369f8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2205.504671] env[63024]: DEBUG nova.compute.manager [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Refreshing instance network info cache due to event network-changed-3cb168eb-0496-4b9c-9924-dddfc19369f8. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2205.505305] env[63024]: DEBUG oslo_concurrency.lockutils [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] Acquiring lock "refresh_cache-233d087b-923e-46d4-a47f-b024583ce0f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2205.653972] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951923, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065201} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2205.654266] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2205.655064] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-580d5fd4-8153-427f-91c1-da54bf5196f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.677993] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Reconfiguring VM instance instance-00000076 to attach disk [datastore1] 8901e234-22a9-4523-8658-411aa19e01e0/8901e234-22a9-4523-8658-411aa19e01e0.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2205.678341] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dad5ba2b-84e5-4f63-b038-f95f2ac942d5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.698873] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2205.698873] env[63024]: value = "task-1951925" [ 2205.698873] env[63024]: _type = "Task" [ 2205.698873] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.707222] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951925, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2205.738190] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating instance_info_cache with network_info: [{"id": "4ba24717-2947-46f0-9df8-733d8b40c345", "address": "fa:16:3e:3b:41:f5", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba24717-29", "ovs_interfaceid": "4ba24717-2947-46f0-9df8-733d8b40c345", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2205.780530] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "refresh_cache-233d087b-923e-46d4-a47f-b024583ce0f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2205.781068] env[63024]: DEBUG nova.compute.manager [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Instance network_info: |[{"id": "3cb168eb-0496-4b9c-9924-dddfc19369f8", "address": "fa:16:3e:73:33:c0", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cb168eb-04", "ovs_interfaceid": "3cb168eb-0496-4b9c-9924-dddfc19369f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2205.781336] env[63024]: DEBUG oslo_concurrency.lockutils [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] Acquired lock "refresh_cache-233d087b-923e-46d4-a47f-b024583ce0f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2205.781898] env[63024]: DEBUG nova.network.neutron [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Refreshing network info cache for port 3cb168eb-0496-4b9c-9924-dddfc19369f8 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2205.783703] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:73:33:c0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e66c4ebe-f808-4b34-bdb5-6c45edb1736f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3cb168eb-0496-4b9c-9924-dddfc19369f8', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2205.793198] env[63024]: DEBUG oslo.service.loopingcall [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2205.793198] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2205.793198] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fc36073f-855e-4f14-a95b-8f11514809e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2205.813998] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Acquiring lock "6917758b-4b68-4a5a-b7e5-b2ffdade19d7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2205.814254] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Lock "6917758b-4b68-4a5a-b7e5-b2ffdade19d7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2205.819694] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2205.819694] env[63024]: value = "task-1951926" [ 2205.819694] env[63024]: _type = "Task" [ 2205.819694] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2205.829845] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951926, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.208842] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951925, 'name': ReconfigVM_Task, 'duration_secs': 0.325911} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.209284] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Reconfigured VM instance instance-00000076 to attach disk [datastore1] 8901e234-22a9-4523-8658-411aa19e01e0/8901e234-22a9-4523-8658-411aa19e01e0.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2206.209919] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8c50062e-3911-4f2f-91bb-b94ff3928c07 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.216410] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2206.216410] env[63024]: value = "task-1951927" [ 2206.216410] env[63024]: _type = "Task" [ 2206.216410] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.224229] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951927, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.243713] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Releasing lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2206.243942] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updated the network info_cache for instance {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10329}} [ 2206.244238] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.244443] env[63024]: DEBUG nova.network.neutron [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2206.245702] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.245922] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.246161] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.246263] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.246483] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.246584] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.246701] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 2206.247080] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2206.316576] env[63024]: DEBUG nova.compute.manager [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2206.332275] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951926, 'name': CreateVM_Task, 'duration_secs': 0.389813} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.332460] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2206.333164] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.333331] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.333641] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2206.334280] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-109fa385-2ace-4fab-949f-5b0b5561258d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.341362] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2206.341362] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522388b8-ce80-10bf-70fd-546b82975e87" [ 2206.341362] env[63024]: _type = "Task" [ 2206.341362] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.356393] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522388b8-ce80-10bf-70fd-546b82975e87, 'name': SearchDatastore_Task, 'duration_secs': 0.010277} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.356775] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2206.357082] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2206.357366] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2206.357557] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2206.357780] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2206.358478] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-224c3591-5b9d-4e6d-9fd7-5c95fee7cbc1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.367680] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2206.367905] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2206.368630] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5baf383-6ea7-43f8-86e3-cfa90a37f62b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.375820] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2206.375820] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5276ac78-48e8-6220-9860-111d72b64e85" [ 2206.375820] env[63024]: _type = "Task" [ 2206.375820] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.383292] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5276ac78-48e8-6220-9860-111d72b64e85, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.585372] env[63024]: DEBUG nova.network.neutron [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Successfully updated port: 4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2206.638424] env[63024]: DEBUG nova.network.neutron [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Updated VIF entry in instance network info cache for port 3cb168eb-0496-4b9c-9924-dddfc19369f8. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2206.638679] env[63024]: DEBUG nova.network.neutron [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Updating instance_info_cache with network_info: [{"id": "3cb168eb-0496-4b9c-9924-dddfc19369f8", "address": "fa:16:3e:73:33:c0", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3cb168eb-04", "ovs_interfaceid": "3cb168eb-0496-4b9c-9924-dddfc19369f8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2206.728663] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951927, 'name': Rename_Task, 'duration_secs': 0.144588} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.729039] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2206.729368] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-928e8666-e135-4f52-81e4-165ef85dcb6e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.737388] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2206.737388] env[63024]: value = "task-1951928" [ 2206.737388] env[63024]: _type = "Task" [ 2206.737388] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.748510] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951928, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2206.754638] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.754956] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2206.755199] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2206.755375] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2206.758503] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da1eb209-17fd-455b-8196-5fc0c12a0db2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.766610] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faaa2372-f1b8-45f7-959d-eb4387bd9b22 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.782105] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5548d3f7-e390-4a83-8b29-f19a60030ddd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.789169] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a01cb60e-2044-4524-ae05-daec8181c13b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.819533] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179718MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2206.819533] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.819752] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2206.843415] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2206.886712] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5276ac78-48e8-6220-9860-111d72b64e85, 'name': SearchDatastore_Task, 'duration_secs': 0.009012} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.887521] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7287296d-0f6a-4782-a65b-3d9a18a29015 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.892749] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2206.892749] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e0e97e-f322-43d3-7b6b-428029eb18b9" [ 2206.892749] env[63024]: _type = "Task" [ 2206.892749] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.905308] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e0e97e-f322-43d3-7b6b-428029eb18b9, 'name': SearchDatastore_Task, 'duration_secs': 0.010213} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2206.905576] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2206.905846] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 233d087b-923e-46d4-a47f-b024583ce0f8/233d087b-923e-46d4-a47f-b024583ce0f8.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2206.906150] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d957547f-a674-4e39-b5de-dcc1866aac32 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2206.912762] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2206.912762] env[63024]: value = "task-1951929" [ 2206.912762] env[63024]: _type = "Task" [ 2206.912762] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2206.920958] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951929, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.016252] env[63024]: DEBUG nova.network.neutron [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating instance_info_cache with network_info: [{"id": "4ba24717-2947-46f0-9df8-733d8b40c345", "address": "fa:16:3e:3b:41:f5", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba24717-29", "ovs_interfaceid": "4ba24717-2947-46f0-9df8-733d8b40c345", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2207.088277] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2207.088474] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2207.088595] env[63024]: DEBUG nova.network.neutron [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2207.141594] env[63024]: DEBUG oslo_concurrency.lockutils [req-853c179a-812d-4ee6-b981-407db468c1e3 req-c08b9181-5f48-444a-8fd6-7b054fbf33c9 service nova] Releasing lock "refresh_cache-233d087b-923e-46d4-a47f-b024583ce0f8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2207.248646] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951928, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.423176] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951929, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.518574] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2207.538416] env[63024]: DEBUG nova.compute.manager [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Received event network-changed-f3bc6bf4-4559-4a56-b6cb-c71a44d6651b {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2207.538866] env[63024]: DEBUG nova.compute.manager [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Refreshing instance network info cache due to event network-changed-f3bc6bf4-4559-4a56-b6cb-c71a44d6651b. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2207.538866] env[63024]: DEBUG oslo_concurrency.lockutils [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] Acquiring lock "refresh_cache-5919cc21-67b8-47d4-9909-bc972b42914d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2207.538972] env[63024]: DEBUG oslo_concurrency.lockutils [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] Acquired lock "refresh_cache-5919cc21-67b8-47d4-9909-bc972b42914d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2207.539177] env[63024]: DEBUG nova.network.neutron [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Refreshing network info cache for port f3bc6bf4-4559-4a56-b6cb-c71a44d6651b {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2207.639795] env[63024]: DEBUG nova.network.neutron [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2207.749958] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951928, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2207.848560] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 14bafeba-9f5b-4488-b29c-38939973deb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2207.849259] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 5919cc21-67b8-47d4-9909-bc972b42914d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2207.849259] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 8901e234-22a9-4523-8658-411aa19e01e0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2207.849259] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 233d087b-923e-46d4-a47f-b024583ce0f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2207.849259] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 7d78b891-34c0-46dd-8b0d-ce80517232e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2207.874381] env[63024]: DEBUG nova.network.neutron [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updating instance_info_cache with network_info: [{"id": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "address": "fa:16:3e:74:e2:14", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dc6f8a3-c6", "ovs_interfaceid": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2207.923707] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951929, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.528822} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2207.923973] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 233d087b-923e-46d4-a47f-b024583ce0f8/233d087b-923e-46d4-a47f-b024583ce0f8.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2207.924208] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2207.924457] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b7c59331-6aa5-4d60-9206-f5769fbebf04 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2207.931076] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2207.931076] env[63024]: value = "task-1951930" [ 2207.931076] env[63024]: _type = "Task" [ 2207.931076] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2207.939278] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951930, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.032859] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2208.033754] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7d9c5bb-0cff-48d2-a843-9c6d0856cf52 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.043428] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2208.043652] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a35a43ba-b17a-4151-9a90-4f1660c8f1d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.251405] env[63024]: DEBUG oslo_vmware.api [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951928, 'name': PowerOnVM_Task, 'duration_secs': 1.118243} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.251727] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2208.251858] env[63024]: INFO nova.compute.manager [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Took 7.65 seconds to spawn the instance on the hypervisor. [ 2208.252052] env[63024]: DEBUG nova.compute.manager [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2208.252822] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f93acb-68ce-468e-a321-23843b12b1c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.274983] env[63024]: DEBUG nova.network.neutron [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Updated VIF entry in instance network info cache for port f3bc6bf4-4559-4a56-b6cb-c71a44d6651b. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2208.275342] env[63024]: DEBUG nova.network.neutron [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Updating instance_info_cache with network_info: [{"id": "f3bc6bf4-4559-4a56-b6cb-c71a44d6651b", "address": "fa:16:3e:a5:49:82", "network": {"id": "18684658-e754-4649-b059-43f84e447803", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-48651862-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.219", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d0715f0ccbd49ec8af8e3049d970994", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "98011432-48cc-4ffd-a5a8-b96d2ea4424a", "external-id": "nsx-vlan-transportzone-745", "segmentation_id": 745, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf3bc6bf4-45", "ovs_interfaceid": "f3bc6bf4-4559-4a56-b6cb-c71a44d6651b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2208.352857] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 6917758b-4b68-4a5a-b7e5-b2ffdade19d7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2208.352980] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2208.353157] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2208.377374] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2208.377374] env[63024]: DEBUG nova.compute.manager [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Instance network_info: |[{"id": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "address": "fa:16:3e:74:e2:14", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dc6f8a3-c6", "ovs_interfaceid": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2208.377665] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:e2:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '09bf081b-cdf0-4977-abe2-2339a87409ab', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4dc6f8a3-c6c8-459b-8a05-db8924adb128', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2208.384998] env[63024]: DEBUG oslo.service.loopingcall [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 
tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2208.387764] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2208.388214] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3d383601-d5ee-42af-9e94-50c5f4bd9748 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.410779] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2208.410779] env[63024]: value = "task-1951932" [ 2208.410779] env[63024]: _type = "Task" [ 2208.410779] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.420415] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951932, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.429487] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2208.429558] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2208.429707] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleting the datastore file [datastore1] 14bafeba-9f5b-4488-b29c-38939973deb9 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2208.431037] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c6e92482-35a0-4d03-9ef7-0c186efe8f8e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.443158] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2208.443158] env[63024]: value = "task-1951933" [ 2208.443158] env[63024]: _type = "Task" [ 2208.443158] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.443437] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951930, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078765} completed successfully. 
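Immediately before the CreateVM_Task above, the driver logs both the cached neutron VIF ("Instance network_info") and the derived "Instance VIF info" structure. The sketch below re-derives that mapping from those two log entries themselves; it is illustrative, not the driver's actual helper, and every field name is taken verbatim from the log.

def vif_info_from_network_info(vif):
    # `vif` is one entry of the network_info list logged above.
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],      # "br-int"
        "mac_address": vif["address"],                 # "fa:16:3e:74:e2:14"
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],                         # the neutron port UUID
        "vif_model": "vmxnet3",
    }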
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.443688] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2208.447202] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33269f0-01ea-45a0-8a3e-4cc625bf8116 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.451687] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1709dc-6fc1-47d4-8eb9-718846c3e898 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.475227] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] 233d087b-923e-46d4-a47f-b024583ce0f8/233d087b-923e-46d4-a47f-b024583ce0f8.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2208.477857] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-486f3e1e-be41-4257-b109-94766c30d88e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.492384] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a809c38a-3502-41fb-80cc-ecd0df524172 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.495410] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951933, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.522561] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51fda48a-6123-4f43-aebe-305cf4376f00 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.525040] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2208.525040] env[63024]: value = "task-1951934" [ 2208.525040] env[63024]: _type = "Task" [ 2208.525040] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2208.532688] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce78e2d0-a57a-4bfd-924d-410586b569d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2208.538925] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951934, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.548174] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2208.771500] env[63024]: INFO nova.compute.manager [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Took 13.96 seconds to build instance. [ 2208.778173] env[63024]: DEBUG oslo_concurrency.lockutils [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] Releasing lock "refresh_cache-5919cc21-67b8-47d4-9909-bc972b42914d" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2208.778434] env[63024]: DEBUG nova.compute.manager [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Received event network-vif-plugged-4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2208.778630] env[63024]: DEBUG oslo_concurrency.lockutils [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] Acquiring lock "7d78b891-34c0-46dd-8b0d-ce80517232e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2208.778833] env[63024]: DEBUG oslo_concurrency.lockutils [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2208.779007] env[63024]: DEBUG oslo_concurrency.lockutils [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2208.779182] env[63024]: DEBUG nova.compute.manager [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] No waiting events found dispatching network-vif-plugged-4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 2208.779340] env[63024]: WARNING nova.compute.manager [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Received unexpected event network-vif-plugged-4dc6f8a3-c6c8-459b-8a05-db8924adb128 for instance with vm_state building and task_state spawning. [ 2208.779493] env[63024]: DEBUG nova.compute.manager [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Received event network-changed-4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2208.779648] env[63024]: DEBUG nova.compute.manager [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Refreshing instance network info cache due to event network-changed-4dc6f8a3-c6c8-459b-8a05-db8924adb128. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2208.779833] env[63024]: DEBUG oslo_concurrency.lockutils [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] Acquiring lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2208.779974] env[63024]: DEBUG oslo_concurrency.lockutils [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] Acquired lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2208.780163] env[63024]: DEBUG nova.network.neutron [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Refreshing network info cache for port 4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2208.921896] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951932, 'name': CreateVM_Task} progress is 25%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2208.953224] env[63024]: DEBUG oslo_vmware.api [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951933, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.278195} completed successfully. 
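The Acquiring/acquired/released triplets around the "7d78b891-...-events" lock above come from oslo.concurrency's lockutils, which records how long a caller waited for a lock and how long it was held. A minimal sketch of the two equivalent usage forms follows, with lock names copied from the log and placeholder bodies (this is not Nova's _pop_event).

from oslo_concurrency import lockutils


@lockutils.synchronized("7d78b891-34c0-46dd-8b0d-ce80517232e1-events")
def pop_event():
    # Runs with the per-instance events lock held; lockutils emits the
    # "acquired ... waited Ns" and "released ... held Ns" debug lines.
    return None


# The same semantics are available as a context manager, e.g. around a
# network info cache refresh:
with lockutils.lock("refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1"):
    pass  # placeholder for the work done while the lock is held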
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2208.953519] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2208.953683] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2208.953851] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2208.974186] env[63024]: INFO nova.scheduler.client.report [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleted allocations for instance 14bafeba-9f5b-4488-b29c-38939973deb9 [ 2209.037032] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951934, 'name': ReconfigVM_Task, 'duration_secs': 0.534299} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.038103] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Reconfigured VM instance instance-00000077 to attach disk [datastore1] 233d087b-923e-46d4-a47f-b024583ce0f8/233d087b-923e-46d4-a47f-b024583ce0f8.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2209.038103] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-97e12e6a-d566-4ef4-84d8-bfda93e6e377 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.044232] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2209.044232] env[63024]: value = "task-1951935" [ 2209.044232] env[63024]: _type = "Task" [ 2209.044232] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.051209] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2209.057185] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951935, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.274122] env[63024]: DEBUG oslo_concurrency.lockutils [None req-d72c687f-263b-49b0-a436-9401e5dd056c tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "8901e234-22a9-4523-8658-411aa19e01e0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.474s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2209.430212] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951932, 'name': CreateVM_Task, 'duration_secs': 0.64772} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.430392] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2209.431104] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2209.431277] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2209.431622] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2209.431890] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18a66da7-a0fd-495f-a83b-32525dfe6c8c {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.440758] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2209.440758] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52560fff-f5f8-40ca-6ca7-50d02aa99867" [ 2209.440758] env[63024]: _type = "Task" [ 2209.440758] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.453962] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52560fff-f5f8-40ca-6ca7-50d02aa99867, 'name': SearchDatastore_Task, 'duration_secs': 0.011395} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.454660] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2209.455158] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2209.455643] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2209.456052] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2209.457021] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2209.457021] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-524c1716-8e65-46cd-8213-b76afb237eb3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.465820] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 
tempest-ServerActionsTestOtherB-772439459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2209.466288] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2209.467314] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d574f79f-e646-40bf-8e96-90addab89254 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.477052] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2209.477052] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523edf9c-42d2-3da0-6151-79979ee57890" [ 2209.477052] env[63024]: _type = "Task" [ 2209.477052] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.479196] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2209.485538] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523edf9c-42d2-3da0-6151-79979ee57890, 'name': SearchDatastore_Task, 'duration_secs': 0.00917} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.486517] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4a1455c-e452-4e9e-baea-9c1ad419001a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.491942] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2209.491942] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52755ee9-77c6-3b48-b8df-877ed4554a05" [ 2209.491942] env[63024]: _type = "Task" [ 2209.491942] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.501274] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52755ee9-77c6-3b48-b8df-877ed4554a05, 'name': SearchDatastore_Task} progress is 0%. 
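The resource-tracker entries earlier in this section (five per-instance allocations of {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}, the "Final resource view ... used_ram=1472MB ... used_vcpus=5", and the inventory with its 512 MB memory reservation and VCPU allocation_ratio of 4.0) are mutually consistent. A short worked check, using only numbers that appear in the log:

instances = 5
per_instance = {"DISK_GB": 1, "MEMORY_MB": 192, "VCPU": 1}
reserved_ram_mb = 512            # MEMORY_MB "reserved" from the inventory entry

used_ram_mb = reserved_ram_mb + instances * per_instance["MEMORY_MB"]
used_disk_gb = instances * per_instance["DISK_GB"]
used_vcpus = instances * per_instance["VCPU"]

assert used_ram_mb == 1472       # matches "used_ram=1472MB"
assert used_disk_gb == 5         # matches "used_disk=5GB"
assert used_vcpus == 5           # matches "total allocated vcpus: 5"

# With allocation_ratio 4.0, placement treats the 48 physical vCPUs as
# 48 * 4.0 = 192 schedulable VCPU, of which 5 are currently allocated.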
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.554204] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951935, 'name': Rename_Task, 'duration_secs': 0.147464} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2209.557300] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2209.557597] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-80aed6ce-3f25-4a18-b5e3-79307cf890d4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2209.559655] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2209.559902] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.740s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2209.560541] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.717s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2209.562044] env[63024]: INFO nova.compute.claims [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2209.575226] env[63024]: DEBUG nova.compute.manager [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Received event network-vif-unplugged-4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2209.575455] env[63024]: DEBUG oslo_concurrency.lockutils [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] Acquiring lock "14bafeba-9f5b-4488-b29c-38939973deb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2209.575978] env[63024]: DEBUG oslo_concurrency.lockutils [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] Lock "14bafeba-9f5b-4488-b29c-38939973deb9-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2209.575978] env[63024]: DEBUG oslo_concurrency.lockutils [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] Lock "14bafeba-9f5b-4488-b29c-38939973deb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2209.575978] env[63024]: DEBUG nova.compute.manager [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] No waiting events found dispatching network-vif-unplugged-4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2209.576763] env[63024]: WARNING nova.compute.manager [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Received unexpected event network-vif-unplugged-4ba24717-2947-46f0-9df8-733d8b40c345 for instance with vm_state shelved_offloaded and task_state None. [ 2209.576763] env[63024]: DEBUG nova.compute.manager [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Received event network-changed-4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2209.576763] env[63024]: DEBUG nova.compute.manager [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Refreshing instance network info cache due to event network-changed-4ba24717-2947-46f0-9df8-733d8b40c345. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2209.576763] env[63024]: DEBUG oslo_concurrency.lockutils [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] Acquiring lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2209.576911] env[63024]: DEBUG oslo_concurrency.lockutils [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] Acquired lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2209.576948] env[63024]: DEBUG nova.network.neutron [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Refreshing network info cache for port 4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2209.581020] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2209.581020] env[63024]: value = "task-1951936" [ 2209.581020] env[63024]: _type = "Task" [ 2209.581020] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2209.590976] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951936, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2209.606374] env[63024]: DEBUG nova.network.neutron [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updated VIF entry in instance network info cache for port 4dc6f8a3-c6c8-459b-8a05-db8924adb128. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2209.607022] env[63024]: DEBUG nova.network.neutron [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updating instance_info_cache with network_info: [{"id": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "address": "fa:16:3e:74:e2:14", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dc6f8a3-c6", "ovs_interfaceid": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2209.826646] env[63024]: DEBUG nova.compute.manager [req-7f0c7c1c-a74f-4803-a148-c0d2ca66cf34 req-e84bade9-10d4-416e-ab1e-ed46f34888b4 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Received event network-changed-f00f50c5-72cc-47ac-b97d-c507d47aa150 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2209.826646] env[63024]: DEBUG nova.compute.manager [req-7f0c7c1c-a74f-4803-a148-c0d2ca66cf34 req-e84bade9-10d4-416e-ab1e-ed46f34888b4 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Refreshing instance network info cache due to event network-changed-f00f50c5-72cc-47ac-b97d-c507d47aa150. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2209.826646] env[63024]: DEBUG oslo_concurrency.lockutils [req-7f0c7c1c-a74f-4803-a148-c0d2ca66cf34 req-e84bade9-10d4-416e-ab1e-ed46f34888b4 service nova] Acquiring lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2209.826854] env[63024]: DEBUG oslo_concurrency.lockutils [req-7f0c7c1c-a74f-4803-a148-c0d2ca66cf34 req-e84bade9-10d4-416e-ab1e-ed46f34888b4 service nova] Acquired lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2209.826905] env[63024]: DEBUG nova.network.neutron [req-7f0c7c1c-a74f-4803-a148-c0d2ca66cf34 req-e84bade9-10d4-416e-ab1e-ed46f34888b4 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Refreshing network info cache for port f00f50c5-72cc-47ac-b97d-c507d47aa150 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2210.004605] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52755ee9-77c6-3b48-b8df-877ed4554a05, 'name': SearchDatastore_Task, 'duration_secs': 0.008575} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.004939] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2210.005292] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 7d78b891-34c0-46dd-8b0d-ce80517232e1/7d78b891-34c0-46dd-8b0d-ce80517232e1.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2210.005674] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b5e9f48-406b-47c3-8ac4-16a08998755c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.012879] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2210.012879] env[63024]: value = "task-1951937" [ 2210.012879] env[63024]: _type = "Task" [ 2210.012879] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2210.021795] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951937, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.095258] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951936, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.109811] env[63024]: DEBUG oslo_concurrency.lockutils [req-9e2728fb-9f07-4eeb-b597-77166e356b68 req-fc60e36b-d865-4e9c-a6a6-90379c385852 service nova] Releasing lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2210.536606] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951937, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2210.594541] env[63024]: DEBUG oslo_vmware.api [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951936, 'name': PowerOnVM_Task, 'duration_secs': 0.667963} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2210.597323] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2210.597541] env[63024]: INFO nova.compute.manager [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Took 7.78 seconds to spawn the instance on the hypervisor. [ 2210.597724] env[63024]: DEBUG nova.compute.manager [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2210.599605] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78928f48-0d2b-4ab1-a50b-af33615dc74c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.650478] env[63024]: DEBUG nova.network.neutron [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updated VIF entry in instance network info cache for port 4ba24717-2947-46f0-9df8-733d8b40c345. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2210.650832] env[63024]: DEBUG nova.network.neutron [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating instance_info_cache with network_info: [{"id": "4ba24717-2947-46f0-9df8-733d8b40c345", "address": "fa:16:3e:3b:41:f5", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap4ba24717-29", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2210.684802] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b5e82f-6c09-4484-87b7-71ae757f0d15 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.692422] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-704ad74c-4416-47b0-9ed7-c70b3d16b89a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.726267] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9897a0c3-3601-4f75-b652-28a364fc399c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.738183] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95a1e48-1d14-4a71-8256-3dfb74685a5c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2210.756569] env[63024]: DEBUG nova.compute.provider_tree [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2210.849345] env[63024]: DEBUG nova.network.neutron [req-7f0c7c1c-a74f-4803-a148-c0d2ca66cf34 req-e84bade9-10d4-416e-ab1e-ed46f34888b4 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Updated VIF entry in instance network info cache for port f00f50c5-72cc-47ac-b97d-c507d47aa150. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2210.849795] env[63024]: DEBUG nova.network.neutron [req-7f0c7c1c-a74f-4803-a148-c0d2ca66cf34 req-e84bade9-10d4-416e-ab1e-ed46f34888b4 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Updating instance_info_cache with network_info: [{"id": "f00f50c5-72cc-47ac-b97d-c507d47aa150", "address": "fa:16:3e:39:d0:25", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00f50c5-72", "ovs_interfaceid": "f00f50c5-72cc-47ac-b97d-c507d47aa150", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2211.024798] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951937, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.541215} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2211.025143] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 7d78b891-34c0-46dd-8b0d-ce80517232e1/7d78b891-34c0-46dd-8b0d-ce80517232e1.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2211.025367] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2211.025621] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c887464-49a5-43f7-9758-87ba6471c959 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.031519] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2211.031519] env[63024]: value = "task-1951938" [ 2211.031519] env[63024]: _type = "Task" [ 2211.031519] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.040483] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951938, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.063241] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "14bafeba-9f5b-4488-b29c-38939973deb9" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2211.117469] env[63024]: INFO nova.compute.manager [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Took 16.22 seconds to build instance. 
[ 2211.153716] env[63024]: DEBUG oslo_concurrency.lockutils [req-6c8366b2-bda6-4379-b0fe-e0cf16175e3b req-b798c52d-caa8-42c7-a328-77f1052eb284 service nova] Releasing lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2211.262059] env[63024]: DEBUG nova.scheduler.client.report [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2211.352727] env[63024]: DEBUG oslo_concurrency.lockutils [req-7f0c7c1c-a74f-4803-a148-c0d2ca66cf34 req-e84bade9-10d4-416e-ab1e-ed46f34888b4 service nova] Releasing lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2211.544638] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951938, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.214014} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2211.544991] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2211.545656] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b522c94-df04-4257-a433-cd20cb07f31a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.568078] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 7d78b891-34c0-46dd-8b0d-ce80517232e1/7d78b891-34c0-46dd-8b0d-ce80517232e1.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2211.568419] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-56caf078-0518-479c-a772-d37aaf5bf4b6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.588876] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2211.588876] env[63024]: value = 
"task-1951939" [ 2211.588876] env[63024]: _type = "Task" [ 2211.588876] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.598134] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951939, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2211.619816] env[63024]: DEBUG oslo_concurrency.lockutils [None req-69eab057-019b-42cf-bdc2-c2a9375ae50d tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "233d087b-923e-46d4-a47f-b024583ce0f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.731s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.767384] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.207s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2211.768139] env[63024]: DEBUG nova.compute.manager [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2211.771310] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.292s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2211.771728] env[63024]: DEBUG nova.objects.instance [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lazy-loading 'resources' on Instance uuid 14bafeba-9f5b-4488-b29c-38939973deb9 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2211.923815] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e87e29f-268e-40b5-859e-ce3b6e50735f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.931298] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-41a09068-1fbc-4541-be8c-b62d256082e8 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Suspending the VM {{(pid=63024) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2211.931899] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-94356f42-3a0b-4d55-8072-5bfa30012397 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2211.938765] env[63024]: DEBUG oslo_vmware.api [None req-41a09068-1fbc-4541-be8c-b62d256082e8 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2211.938765] env[63024]: value = "task-1951940" [ 2211.938765] env[63024]: _type = "Task" [ 2211.938765] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2211.948101] env[63024]: DEBUG oslo_vmware.api [None req-41a09068-1fbc-4541-be8c-b62d256082e8 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951940, 'name': SuspendVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.099415] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951939, 'name': ReconfigVM_Task, 'duration_secs': 0.437783} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2212.099702] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 7d78b891-34c0-46dd-8b0d-ce80517232e1/7d78b891-34c0-46dd-8b0d-ce80517232e1.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2212.100409] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84a2b63b-c8ac-4814-a19a-89cfa018e9ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.106907] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2212.106907] env[63024]: value = "task-1951941" [ 2212.106907] env[63024]: _type = "Task" [ 2212.106907] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.115561] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951941, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.275191] env[63024]: DEBUG nova.compute.utils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2212.277051] env[63024]: DEBUG nova.objects.instance [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lazy-loading 'numa_topology' on Instance uuid 14bafeba-9f5b-4488-b29c-38939973deb9 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2212.278100] env[63024]: DEBUG nova.compute.manager [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2212.278394] env[63024]: DEBUG nova.network.neutron [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2212.353651] env[63024]: DEBUG nova.policy [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3e0efcea6d8346ea96781a38cb19516c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ddb337ef934641bf88758506aa9b91ff', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2212.451275] env[63024]: DEBUG oslo_vmware.api [None req-41a09068-1fbc-4541-be8c-b62d256082e8 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951940, 'name': SuspendVM_Task} progress is 75%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.618752] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951941, 'name': Rename_Task, 'duration_secs': 0.257214} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2212.619108] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2212.619307] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54f48dfd-c0d9-4833-8264-582c08fc746a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.626268] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2212.626268] env[63024]: value = "task-1951942" [ 2212.626268] env[63024]: _type = "Task" [ 2212.626268] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2212.633911] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951942, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2212.654661] env[63024]: DEBUG nova.network.neutron [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Successfully created port: b28e4901-8be8-43da-a093-669823ad01ad {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2212.778901] env[63024]: DEBUG nova.compute.manager [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2212.781630] env[63024]: DEBUG nova.objects.base [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Object Instance<14bafeba-9f5b-4488-b29c-38939973deb9> lazy-loaded attributes: resources,numa_topology {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2212.877930] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1e5c47-9908-4f73-99e9-8b1c6f9470d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.885390] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5b32cf6-857c-4980-8685-43c41ec9db36 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.915331] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbdc2a2c-3d84-4817-bcc2-759cec3bbe63 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.922870] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40466398-4f5e-45af-a6d8-bed026edcbb6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2212.936251] env[63024]: DEBUG nova.compute.provider_tree [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2212.948717] env[63024]: DEBUG oslo_vmware.api [None req-41a09068-1fbc-4541-be8c-b62d256082e8 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951940, 'name': SuspendVM_Task, 'duration_secs': 0.578655} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2212.949592] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-41a09068-1fbc-4541-be8c-b62d256082e8 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Suspended the VM {{(pid=63024) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2212.949766] env[63024]: DEBUG nova.compute.manager [None req-41a09068-1fbc-4541-be8c-b62d256082e8 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2212.950589] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6837c437-5fb5-422d-bf02-c514bfe4e621 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.138205] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951942, 'name': PowerOnVM_Task} progress is 71%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.455899] env[63024]: ERROR nova.scheduler.client.report [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [req-9f927aa1-b8fe-4792-9650-fcff02f5601d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9f927aa1-b8fe-4792-9650-fcff02f5601d"}]} [ 2213.475795] env[63024]: DEBUG nova.scheduler.client.report [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 2213.489116] env[63024]: DEBUG nova.scheduler.client.report [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 2213.489352] env[63024]: DEBUG nova.compute.provider_tree [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2213.500392] env[63024]: DEBUG nova.scheduler.client.report [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 2213.517775] env[63024]: DEBUG nova.scheduler.client.report [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 2213.590367] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f196596-2474-44d3-9be2-5a07917f08f5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.598666] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9f0e371e-6866-4315-a4d4-f17eebdfe911 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.632090] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9399572-3c9c-43c8-9426-39c241d6d61d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.642588] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39cf89c2-152d-4723-bd61-3727b44552df {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.646311] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951942, 'name': PowerOnVM_Task} progress is 88%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2213.656713] env[63024]: DEBUG nova.compute.provider_tree [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2213.790645] env[63024]: DEBUG nova.compute.manager [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2213.818586] env[63024]: DEBUG nova.virt.hardware [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2213.818801] env[63024]: DEBUG nova.virt.hardware [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2213.818961] env[63024]: DEBUG nova.virt.hardware [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2213.819158] env[63024]: DEBUG nova.virt.hardware [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2213.819306] env[63024]: DEBUG nova.virt.hardware [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2213.819449] env[63024]: DEBUG nova.virt.hardware [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2213.819655] env[63024]: DEBUG nova.virt.hardware [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2213.819809] env[63024]: DEBUG nova.virt.hardware [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 
tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2213.819975] env[63024]: DEBUG nova.virt.hardware [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2213.820159] env[63024]: DEBUG nova.virt.hardware [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2213.820336] env[63024]: DEBUG nova.virt.hardware [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2213.821272] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fcce2f7-f8d4-41f2-a352-7c4ca43501c8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.832463] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0438c674-e6d9-4074-b35f-491cdd1189fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.138700] env[63024]: DEBUG oslo_vmware.api [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951942, 'name': PowerOnVM_Task, 'duration_secs': 1.45799} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2214.138957] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2214.139180] env[63024]: INFO nova.compute.manager [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Took 9.10 seconds to spawn the instance on the hypervisor. 
[ 2214.139361] env[63024]: DEBUG nova.compute.manager [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2214.140126] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f95cc0f-114b-4151-ad0a-0e9f22f93c46 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.189441] env[63024]: DEBUG nova.scheduler.client.report [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 186 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2214.189703] env[63024]: DEBUG nova.compute.provider_tree [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 186 to 187 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2214.189890] env[63024]: DEBUG nova.compute.provider_tree [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2214.329887] env[63024]: DEBUG oslo_concurrency.lockutils [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "233d087b-923e-46d4-a47f-b024583ce0f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.330199] env[63024]: DEBUG oslo_concurrency.lockutils [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "233d087b-923e-46d4-a47f-b024583ce0f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.330419] env[63024]: DEBUG 
oslo_concurrency.lockutils [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "233d087b-923e-46d4-a47f-b024583ce0f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.330603] env[63024]: DEBUG oslo_concurrency.lockutils [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "233d087b-923e-46d4-a47f-b024583ce0f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.330774] env[63024]: DEBUG oslo_concurrency.lockutils [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "233d087b-923e-46d4-a47f-b024583ce0f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.333456] env[63024]: INFO nova.compute.manager [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Terminating instance [ 2214.656222] env[63024]: INFO nova.compute.manager [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Took 15.84 seconds to build instance. 
[ 2214.696103] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.923s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.739401] env[63024]: DEBUG nova.compute.manager [req-1f69efc1-d719-414a-8e83-1b12af10d9c9 req-929e2f11-e340-44bd-98c0-67ddb2ec9f93 service nova] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Received event network-vif-plugged-b28e4901-8be8-43da-a093-669823ad01ad {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2214.740040] env[63024]: DEBUG oslo_concurrency.lockutils [req-1f69efc1-d719-414a-8e83-1b12af10d9c9 req-929e2f11-e340-44bd-98c0-67ddb2ec9f93 service nova] Acquiring lock "6917758b-4b68-4a5a-b7e5-b2ffdade19d7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2214.740270] env[63024]: DEBUG oslo_concurrency.lockutils [req-1f69efc1-d719-414a-8e83-1b12af10d9c9 req-929e2f11-e340-44bd-98c0-67ddb2ec9f93 service nova] Lock "6917758b-4b68-4a5a-b7e5-b2ffdade19d7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2214.740445] env[63024]: DEBUG oslo_concurrency.lockutils [req-1f69efc1-d719-414a-8e83-1b12af10d9c9 req-929e2f11-e340-44bd-98c0-67ddb2ec9f93 service nova] Lock "6917758b-4b68-4a5a-b7e5-b2ffdade19d7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2214.740619] env[63024]: DEBUG nova.compute.manager [req-1f69efc1-d719-414a-8e83-1b12af10d9c9 req-929e2f11-e340-44bd-98c0-67ddb2ec9f93 service nova] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] No waiting events found dispatching network-vif-plugged-b28e4901-8be8-43da-a093-669823ad01ad {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2214.740785] env[63024]: WARNING nova.compute.manager [req-1f69efc1-d719-414a-8e83-1b12af10d9c9 req-929e2f11-e340-44bd-98c0-67ddb2ec9f93 service nova] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Received unexpected event network-vif-plugged-b28e4901-8be8-43da-a093-669823ad01ad for instance with vm_state building and task_state spawning. [ 2214.836206] env[63024]: DEBUG nova.network.neutron [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Successfully updated port: b28e4901-8be8-43da-a093-669823ad01ad {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2214.841394] env[63024]: DEBUG nova.compute.manager [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2214.841982] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2214.843266] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79365d1e-c689-4ab8-9cb5-ef34fcd4e69d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2214.851394] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2214.852298] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7fccc541-e0a6-40eb-9f7d-5e4957764f1a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.040977] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2215.041220] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2215.041403] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleting the datastore file [datastore1] 233d087b-923e-46d4-a47f-b024583ce0f8 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2215.041662] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39102058-c1cf-4817-b697-5d29188af10c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2215.049356] env[63024]: DEBUG oslo_vmware.api [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2215.049356] env[63024]: value = "task-1951944" [ 2215.049356] env[63024]: _type = "Task" [ 2215.049356] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2215.056975] env[63024]: DEBUG oslo_vmware.api [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951944, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2215.158451] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94c09d39-e7c6-413a-81c7-9de864ed3eaf tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.352s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2215.169917] env[63024]: DEBUG nova.compute.manager [req-4219bd6b-3c77-4aa7-9025-3c5265bf8975 req-ba5c71b7-13e4-40cd-b3cd-25ab012d0350 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Received event network-changed-4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2215.170219] env[63024]: DEBUG nova.compute.manager [req-4219bd6b-3c77-4aa7-9025-3c5265bf8975 req-ba5c71b7-13e4-40cd-b3cd-25ab012d0350 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Refreshing instance network info cache due to event network-changed-4dc6f8a3-c6c8-459b-8a05-db8924adb128. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2215.170333] env[63024]: DEBUG oslo_concurrency.lockutils [req-4219bd6b-3c77-4aa7-9025-3c5265bf8975 req-ba5c71b7-13e4-40cd-b3cd-25ab012d0350 service nova] Acquiring lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2215.170515] env[63024]: DEBUG oslo_concurrency.lockutils [req-4219bd6b-3c77-4aa7-9025-3c5265bf8975 req-ba5c71b7-13e4-40cd-b3cd-25ab012d0350 service nova] Acquired lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2215.170657] env[63024]: DEBUG nova.network.neutron [req-4219bd6b-3c77-4aa7-9025-3c5265bf8975 req-ba5c71b7-13e4-40cd-b3cd-25ab012d0350 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Refreshing network info cache for port 4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2215.201657] env[63024]: DEBUG oslo_concurrency.lockutils [None req-dc0a44bd-aa12-4121-aa5f-7ed2ac23bf7b tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "14bafeba-9f5b-4488-b29c-38939973deb9" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.698s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2215.202484] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "14bafeba-9f5b-4488-b29c-38939973deb9" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 4.139s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2215.202663] env[63024]: INFO nova.compute.manager [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] 
Unshelving [ 2215.343654] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Acquiring lock "refresh_cache-6917758b-4b68-4a5a-b7e5-b2ffdade19d7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2215.343654] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Acquired lock "refresh_cache-6917758b-4b68-4a5a-b7e5-b2ffdade19d7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2215.343769] env[63024]: DEBUG nova.network.neutron [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2215.558851] env[63024]: DEBUG oslo_vmware.api [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951944, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.208843} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2215.559134] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2215.559298] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2215.559476] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2215.559646] env[63024]: INFO nova.compute.manager [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Took 0.72 seconds to destroy the instance on the hypervisor. [ 2215.559883] env[63024]: DEBUG oslo.service.loopingcall [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
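Once the instance is destroyed on the hypervisor, the compute manager hands network deallocation to a retrying helper (_deallocate_network_with_retries, driven by an oslo.service loopingcall). A rough sketch of that retry shape, assuming a hypothetical `deallocate` callable wrapping Neutron's deallocate_for_instance(); the attempt count and backoff below are illustrative only, not Nova's actual loopingcall configuration:

    import time

    def deallocate_network_with_retries(deallocate, instance_uuid,
                                        attempts=3, delay=1.0):
        # `deallocate` is a hypothetical callable; real code catches
        # specific Neutron/connection errors rather than bare Exception.
        last_exc = RuntimeError('no attempts made')
        for attempt in range(1, attempts + 1):
            try:
                return deallocate(instance_uuid)
            except Exception as exc:
                last_exc = exc
                print('deallocate attempt %d/%d failed: %s'
                      % (attempt, attempts, exc))
                time.sleep(delay * attempt)   # simple linear backoff
        raise last_exc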
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2215.560090] env[63024]: DEBUG nova.compute.manager [-] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2215.560187] env[63024]: DEBUG nova.network.neutron [-] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2215.907491] env[63024]: DEBUG nova.network.neutron [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2216.095455] env[63024]: DEBUG nova.network.neutron [req-4219bd6b-3c77-4aa7-9025-3c5265bf8975 req-ba5c71b7-13e4-40cd-b3cd-25ab012d0350 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updated VIF entry in instance network info cache for port 4dc6f8a3-c6c8-459b-8a05-db8924adb128. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2216.095742] env[63024]: DEBUG nova.network.neutron [req-4219bd6b-3c77-4aa7-9025-3c5265bf8975 req-ba5c71b7-13e4-40cd-b3cd-25ab012d0350 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updating instance_info_cache with network_info: [{"id": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "address": "fa:16:3e:74:e2:14", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dc6f8a3-c6", "ovs_interfaceid": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.122240] env[63024]: DEBUG nova.network.neutron [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Updating instance_info_cache with network_info: [{"id": "b28e4901-8be8-43da-a093-669823ad01ad", "address": "fa:16:3e:d6:a6:b7", "network": {"id": "32802132-c2cf-4989-8199-1d395897645f", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1859785167-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": 
"192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ddb337ef934641bf88758506aa9b91ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37333dc2-982e-45e9-9dda-0c18417d7fa6", "external-id": "nsx-vlan-transportzone-227", "segmentation_id": 227, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb28e4901-8b", "ovs_interfaceid": "b28e4901-8be8-43da-a093-669823ad01ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.212585] env[63024]: DEBUG nova.compute.utils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2216.453223] env[63024]: DEBUG nova.network.neutron [-] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2216.598404] env[63024]: DEBUG oslo_concurrency.lockutils [req-4219bd6b-3c77-4aa7-9025-3c5265bf8975 req-ba5c71b7-13e4-40cd-b3cd-25ab012d0350 service nova] Releasing lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2216.625373] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Releasing lock "refresh_cache-6917758b-4b68-4a5a-b7e5-b2ffdade19d7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2216.625734] env[63024]: DEBUG nova.compute.manager [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Instance network_info: |[{"id": "b28e4901-8be8-43da-a093-669823ad01ad", "address": "fa:16:3e:d6:a6:b7", "network": {"id": "32802132-c2cf-4989-8199-1d395897645f", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1859785167-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ddb337ef934641bf88758506aa9b91ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37333dc2-982e-45e9-9dda-0c18417d7fa6", "external-id": "nsx-vlan-transportzone-227", "segmentation_id": 227, "bound_drivers": {"0": "nsxv3"}}, "devname": 
"tapb28e4901-8b", "ovs_interfaceid": "b28e4901-8be8-43da-a093-669823ad01ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2216.626238] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:a6:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '37333dc2-982e-45e9-9dda-0c18417d7fa6', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b28e4901-8be8-43da-a093-669823ad01ad', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2216.634801] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Creating folder: Project (ddb337ef934641bf88758506aa9b91ff). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2216.635154] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2ad28502-3f99-47ad-a371-4b3d3a854a64 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.646928] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Created folder: Project (ddb337ef934641bf88758506aa9b91ff) in parent group-v401959. [ 2216.647148] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Creating folder: Instances. Parent ref: group-v402271. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2216.647406] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3e3e8dc8-9bf9-4461-b78a-76dc164d79a3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.659060] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Created folder: Instances in parent group-v402271. [ 2216.659328] env[63024]: DEBUG oslo.service.loopingcall [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2216.659533] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2216.659752] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a1f3698-d426-4292-86f6-19e1235d3b57 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.679686] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2216.679686] env[63024]: value = "task-1951947" [ 2216.679686] env[63024]: _type = "Task" [ 2216.679686] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2216.687338] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951947, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2216.715730] env[63024]: INFO nova.virt.block_device [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Booting with volume 06603432-9319-4172-af00-cbb4469c359f at /dev/sdb [ 2216.754857] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bca889ee-bf38-4a6a-8789-9187cf171809 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.764849] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e4c38f-77bf-4109-9c16-5eac524d366f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.778777] env[63024]: DEBUG nova.compute.manager [req-0e48dc13-7c46-4d79-be1e-374a420b5734 req-b62c6b0e-23a4-4e93-aaec-8071397da1d0 service nova] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Received event network-changed-b28e4901-8be8-43da-a093-669823ad01ad {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2216.778983] env[63024]: DEBUG nova.compute.manager [req-0e48dc13-7c46-4d79-be1e-374a420b5734 req-b62c6b0e-23a4-4e93-aaec-8071397da1d0 service nova] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Refreshing instance network info cache due to event network-changed-b28e4901-8be8-43da-a093-669823ad01ad. 
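The "Instance VIF info" entry above is derived from the Neutron network_info shown in the cache updates: the bridge becomes the network name, the NSX logical-switch id becomes an OpaqueNetwork reference, and the port UUID becomes the iface_id. A sketch of that mapping, reconstructed only from the fields visible in the log (not the driver's actual VIF-translation code):

    def vif_to_vmware_vif_info(vif, vif_model='vmxnet3'):
        # `vif` is one entry of the network_info list logged above.
        details = vif.get('details', {})
        return {
            'network_name': vif['network']['bridge'],       # e.g. 'br-int'
            'mac_address': vif['address'],                   # e.g. 'fa:16:3e:d6:a6:b7'
            'network_ref': {
                'type': 'OpaqueNetwork',
                'network-id': details['nsx-logical-switch-id'],
                'network-type': 'nsx.LogicalSwitch',
                'use-external-id': True,
            },
            'iface_id': vif['id'],                           # Neutron port UUID
            'vif_model': vif_model,
        }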
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2216.779218] env[63024]: DEBUG oslo_concurrency.lockutils [req-0e48dc13-7c46-4d79-be1e-374a420b5734 req-b62c6b0e-23a4-4e93-aaec-8071397da1d0 service nova] Acquiring lock "refresh_cache-6917758b-4b68-4a5a-b7e5-b2ffdade19d7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2216.779364] env[63024]: DEBUG oslo_concurrency.lockutils [req-0e48dc13-7c46-4d79-be1e-374a420b5734 req-b62c6b0e-23a4-4e93-aaec-8071397da1d0 service nova] Acquired lock "refresh_cache-6917758b-4b68-4a5a-b7e5-b2ffdade19d7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2216.779567] env[63024]: DEBUG nova.network.neutron [req-0e48dc13-7c46-4d79-be1e-374a420b5734 req-b62c6b0e-23a4-4e93-aaec-8071397da1d0 service nova] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Refreshing network info cache for port b28e4901-8be8-43da-a093-669823ad01ad {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2216.797416] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-198ddd6c-ef23-4d46-8241-9f7b15e70da4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.805573] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6e387fb-580b-4555-9239-7fb0dc218f80 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.830892] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f425ce7-f39d-4867-847c-9337bc8f9b95 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.837232] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91e8aef-1929-4c08-bb39-d36a1fc54286 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2216.850628] env[63024]: DEBUG nova.virt.block_device [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating existing volume attachment record: 91daad2c-4074-46f9-92ec-9e79126689e3 {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2216.955892] env[63024]: INFO nova.compute.manager [-] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Took 1.40 seconds to deallocate network for instance. [ 2217.190372] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951947, 'name': CreateVM_Task} progress is 99%. 
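The unshelve path above picks a device name for the volume being reattached ("Using /dev/sd instead of None", then "Booting with volume ... at /dev/sdb"). A simplified sketch of next-free-device-name selection; Nova's real get_next_device_name also honours block device mappings and multi-letter suffixes:

    import string

    def get_next_device_name(used, prefix='/dev/sd'):
        # `used` is the set of device paths already attached to the instance.
        for letter in string.ascii_lowercase:
            candidate = prefix + letter
            if candidate not in used:
                return candidate
        raise ValueError('no free device names left under ' + prefix)

    # With the root disk on /dev/sda, the next volume lands on /dev/sdb,
    # matching "Booting with volume ... at /dev/sdb" in the log above.
    assert get_next_device_name({'/dev/sda'}) == '/dev/sdb'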
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2217.464202] env[63024]: DEBUG oslo_concurrency.lockutils [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2217.464481] env[63024]: DEBUG oslo_concurrency.lockutils [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2217.464697] env[63024]: DEBUG nova.objects.instance [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lazy-loading 'resources' on Instance uuid 233d087b-923e-46d4-a47f-b024583ce0f8 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2217.497507] env[63024]: DEBUG nova.network.neutron [req-0e48dc13-7c46-4d79-be1e-374a420b5734 req-b62c6b0e-23a4-4e93-aaec-8071397da1d0 service nova] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Updated VIF entry in instance network info cache for port b28e4901-8be8-43da-a093-669823ad01ad. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2217.497858] env[63024]: DEBUG nova.network.neutron [req-0e48dc13-7c46-4d79-be1e-374a420b5734 req-b62c6b0e-23a4-4e93-aaec-8071397da1d0 service nova] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Updating instance_info_cache with network_info: [{"id": "b28e4901-8be8-43da-a093-669823ad01ad", "address": "fa:16:3e:d6:a6:b7", "network": {"id": "32802132-c2cf-4989-8199-1d395897645f", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-1859785167-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ddb337ef934641bf88758506aa9b91ff", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "37333dc2-982e-45e9-9dda-0c18417d7fa6", "external-id": "nsx-vlan-transportzone-227", "segmentation_id": 227, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb28e4901-8b", "ovs_interfaceid": "b28e4901-8be8-43da-a093-669823ad01ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2217.690787] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951947, 'name': CreateVM_Task} progress is 99%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.001040] env[63024]: DEBUG oslo_concurrency.lockutils [req-0e48dc13-7c46-4d79-be1e-374a420b5734 req-b62c6b0e-23a4-4e93-aaec-8071397da1d0 service nova] Releasing lock "refresh_cache-6917758b-4b68-4a5a-b7e5-b2ffdade19d7" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2218.001341] env[63024]: DEBUG nova.compute.manager [req-0e48dc13-7c46-4d79-be1e-374a420b5734 req-b62c6b0e-23a4-4e93-aaec-8071397da1d0 service nova] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Received event network-vif-deleted-3cb168eb-0496-4b9c-9924-dddfc19369f8 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2218.049526] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cde0d3-878e-4713-b780-af0fbf17c3bf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.057831] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-994b5505-2526-4fab-aa57-2272d0416ee9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.088987] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dc22f65-5035-42da-9617-abe43f1c8bae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.096215] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3086afcb-5600-46dd-bc84-6b9941a0f061 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.109124] env[63024]: DEBUG nova.compute.provider_tree [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2218.191574] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951947, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2218.612377] env[63024]: DEBUG nova.scheduler.client.report [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2218.691883] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951947, 'name': CreateVM_Task, 'duration_secs': 1.600165} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2218.692089] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2218.692804] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2218.693017] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2218.693382] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2218.693679] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c504704-e17b-449d-9cff-5bbd326900ad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2218.698265] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Waiting for the task: (returnval){ [ 2218.698265] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ef2f0a-2604-acc2-fcd9-ef9199e0f2c4" [ 2218.698265] env[63024]: _type = "Task" [ 2218.698265] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2218.705807] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ef2f0a-2604-acc2-fcd9-ef9199e0f2c4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.117433] env[63024]: DEBUG oslo_concurrency.lockutils [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.653s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2219.137929] env[63024]: INFO nova.scheduler.client.report [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted allocations for instance 233d087b-923e-46d4-a47f-b024583ce0f8 [ 2219.209579] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ef2f0a-2604-acc2-fcd9-ef9199e0f2c4, 'name': SearchDatastore_Task, 'duration_secs': 0.00999} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.209878] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2219.210125] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2219.210360] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2219.210507] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2219.210687] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2219.210944] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-dd2f4440-810f-4072-9bf5-b245368b2bf5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.219444] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2219.219616] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2219.220483] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e7e8bb7-1463-48e8-b4e2-5a60fc61b85a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.225048] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Waiting for the task: (returnval){ [ 2219.225048] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5247f082-d716-f2ae-74be-1def1a7798fb" [ 2219.225048] env[63024]: _type = "Task" [ 2219.225048] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.232070] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5247f082-d716-f2ae-74be-1def1a7798fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2219.647128] env[63024]: DEBUG oslo_concurrency.lockutils [None req-11594836-58ba-43cf-825b-a0ad55d32163 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "233d087b-923e-46d4-a47f-b024583ce0f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.317s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2219.736636] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5247f082-d716-f2ae-74be-1def1a7798fb, 'name': SearchDatastore_Task, 'duration_secs': 0.008223} completed successfully. 
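The spawn path above serializes access to the devstack-image-cache_base entry for the image, checks whether the cached VMDK already exists on the datastore (HostDatastoreBrowser.SearchDatastore_Task) and creates the cache directory if needed (FileManager.MakeDirectory) before reusing it. A local-filesystem analogy of that fetch-image-if-missing pattern; download_image is a hypothetical callable, and the lock is a plain threading.Lock rather than oslo.concurrency's named locks:

    import os
    import threading

    _image_cache_lock = threading.Lock()

    def fetch_image_if_missing(cache_dir, image_id, download_image):
        # download_image(image_id, path) is a hypothetical callable that
        # writes the image to the given path.
        cached_path = os.path.join(cache_dir, image_id, image_id + '.vmdk')
        with _image_cache_lock:                                   # lock the cache entry
            os.makedirs(os.path.dirname(cached_path), exist_ok=True)  # MakeDirectory analogue
            if not os.path.exists(cached_path):                   # SearchDatastore analogue
                download_image(image_id, cached_path)
        return cached_path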
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.737438] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-086f8aa1-7db0-41e5-a09f-a9b0f1a6bd94 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.743333] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Waiting for the task: (returnval){ [ 2219.743333] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52433b4f-2076-ddba-21d3-9fac927b2d75" [ 2219.743333] env[63024]: _type = "Task" [ 2219.743333] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.754259] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52433b4f-2076-ddba-21d3-9fac927b2d75, 'name': SearchDatastore_Task, 'duration_secs': 0.009189} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2219.754479] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2219.754723] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 6917758b-4b68-4a5a-b7e5-b2ffdade19d7/6917758b-4b68-4a5a-b7e5-b2ffdade19d7.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2219.754959] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a8797941-9164-49f2-a0c3-4eb297c2dc67 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2219.761752] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Waiting for the task: (returnval){ [ 2219.761752] env[63024]: value = "task-1951952" [ 2219.761752] env[63024]: _type = "Task" [ 2219.761752] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2219.769576] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951952, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.272483] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951952, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439881} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.272944] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 6917758b-4b68-4a5a-b7e5-b2ffdade19d7/6917758b-4b68-4a5a-b7e5-b2ffdade19d7.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2220.273067] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2220.273323] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b6f13f6f-2523-41d8-9e7c-792e7ecb64ca {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.281188] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Waiting for the task: (returnval){ [ 2220.281188] env[63024]: value = "task-1951953" [ 2220.281188] env[63024]: _type = "Task" [ 2220.281188] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.288687] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951953, 'name': ExtendVirtualDisk_Task} progress is 0%. 
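With the cached image in place, the driver copies it into the instance directory (CopyVirtualDisk_Task) and then grows the copy to the flavor's root disk size (ExtendVirtualDisk_Task; 1048576 KB, i.e. a 1 GB root disk, in the entries above). A small sketch of that order of operations; `copy` and `extend` are injected stand-ins for the two vSphere tasks, not real API calls:

    import shutil

    def root_gb_to_kb(root_gb):
        # The extend call works in KB: a 1 GB root disk -> 1048576 KB.
        return root_gb * 1024 * 1024

    def clone_and_extend_root_disk(cached_vmdk, instance_vmdk, root_gb,
                                   copy=shutil.copyfile,
                                   extend=lambda path, kb: None):
        # Only the ordering mirrors the log: copy from the cache first,
        # then extend the instance copy to the flavor's root size.
        copy(cached_vmdk, instance_vmdk)
        extend(instance_vmdk, root_gb_to_kb(root_gb))
        return instance_vmdk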
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2220.296523] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "1013a279-f79d-467e-a37e-7e66f77db625" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2220.296738] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "1013a279-f79d-467e-a37e-7e66f77db625" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2220.790675] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951953, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066876} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2220.790949] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2220.791733] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f8746c-2441-4ac0-b548-51e1be64f066 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.805184] env[63024]: DEBUG nova.compute.manager [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2220.815978] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] 6917758b-4b68-4a5a-b7e5-b2ffdade19d7/6917758b-4b68-4a5a-b7e5-b2ffdade19d7.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2220.816728] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0f92b368-e134-4d00-a925-f68573c35d4d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2220.836330] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Waiting for the task: (returnval){ [ 2220.836330] env[63024]: value = "task-1951954" [ 2220.836330] env[63024]: _type = "Task" [ 2220.836330] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2220.845631] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951954, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.337216] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2221.337520] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2221.339369] env[63024]: INFO nova.compute.claims [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2221.352115] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951954, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2221.855444] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951954, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.355362] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951954, 'name': ReconfigVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.433109] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54839f67-3398-44f3-b7f0-5a399ebb1f84 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.441602] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1526ea-8157-4d2a-8419-bf2276d3a576 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.471049] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a5c130-73d0-48dd-b62f-8694a09cafa3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.477892] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6714c792-371a-4603-a6c0-5292b91ce833 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.490677] env[63024]: DEBUG nova.compute.provider_tree [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2222.852696] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951954, 'name': ReconfigVM_Task, 'duration_secs': 1.803387} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2222.853061] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Reconfigured VM instance instance-00000079 to attach disk [datastore1] 6917758b-4b68-4a5a-b7e5-b2ffdade19d7/6917758b-4b68-4a5a-b7e5-b2ffdade19d7.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2222.853652] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-795ac6f8-87b2-4f1f-8ed2-2eb150933141 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2222.860077] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Waiting for the task: (returnval){ [ 2222.860077] env[63024]: value = "task-1951955" [ 2222.860077] env[63024]: _type = "Task" [ 2222.860077] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2222.869773] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951955, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2222.940669] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2223.010467] env[63024]: ERROR nova.scheduler.client.report [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [req-9bdc4d08-c456-428e-9f73-b7bc24b85d40] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-9bdc4d08-c456-428e-9f73-b7bc24b85d40"}]} [ 2223.026238] env[63024]: DEBUG nova.scheduler.client.report [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 2223.039227] env[63024]: DEBUG nova.scheduler.client.report [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 2223.039227] env[63024]: DEBUG nova.compute.provider_tree [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2223.049254] env[63024]: DEBUG nova.scheduler.client.report [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 2223.066336] env[63024]: DEBUG nova.scheduler.client.report [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 2223.149450] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b5d109-2ce0-4635-bd2c-6a2d03d5e185 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.157328] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882298a5-ad6d-4014-b8f8-3ec7eba2d2ea {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.187276] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2022b91-2667-4b7a-af4b-38d330effd05 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.195079] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff3df53-2480-4436-b317-87a560858500 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.208579] env[63024]: DEBUG nova.compute.provider_tree [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2223.371035] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951955, 'name': Rename_Task, 'duration_secs': 0.12892} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2223.371449] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2223.371552] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-789822ef-23f3-4d77-b395-9b1375f12039 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2223.378985] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Waiting for the task: (returnval){ [ 2223.378985] env[63024]: value = "task-1951956" [ 2223.378985] env[63024]: _type = "Task" [ 2223.378985] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2223.387213] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951956, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2223.739034] env[63024]: DEBUG nova.scheduler.client.report [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 188 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2223.739313] env[63024]: DEBUG nova.compute.provider_tree [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 188 to 189 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2223.739494] env[63024]: DEBUG nova.compute.provider_tree [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2223.890558] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951956, 'name': PowerOnVM_Task} progress is 74%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2224.245499] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.908s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2224.246289] env[63024]: DEBUG nova.compute.manager [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2224.250339] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.310s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2224.250685] env[63024]: DEBUG nova.objects.instance [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lazy-loading 'pci_requests' on Instance uuid 14bafeba-9f5b-4488-b29c-38939973deb9 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2224.390773] env[63024]: DEBUG oslo_vmware.api [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951956, 'name': PowerOnVM_Task, 'duration_secs': 0.626692} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2224.391184] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2224.391319] env[63024]: INFO nova.compute.manager [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Took 10.60 seconds to spawn the instance on the hypervisor. [ 2224.391528] env[63024]: DEBUG nova.compute.manager [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2224.392311] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935ca8f7-ded7-45f5-adfa-f6cabdba9e06 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2224.753993] env[63024]: DEBUG nova.compute.utils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2224.755483] env[63024]: DEBUG nova.compute.manager [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Allocating IP information in the background. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2224.755648] env[63024]: DEBUG nova.network.neutron [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2224.759174] env[63024]: DEBUG nova.objects.instance [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lazy-loading 'numa_topology' on Instance uuid 14bafeba-9f5b-4488-b29c-38939973deb9 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2224.799267] env[63024]: DEBUG nova.policy [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a7c32db2d81e40c492c1362d8356a03c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '93098ad83ae144bf90a12c97ec863c06', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2224.908727] env[63024]: INFO nova.compute.manager [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Took 18.08 seconds to build instance. [ 2225.061577] env[63024]: DEBUG nova.network.neutron [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Successfully created port: 776a40cb-b4fc-40ff-9fda-a77dca4c0001 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2225.261689] env[63024]: DEBUG nova.compute.manager [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2225.268021] env[63024]: INFO nova.compute.claims [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2225.410636] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4aad3b62-2d71-4837-b517-69bd7efbd33a tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Lock "6917758b-4b68-4a5a-b7e5-b2ffdade19d7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.596s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2226.057437] env[63024]: DEBUG oslo_concurrency.lockutils [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Acquiring lock "6917758b-4b68-4a5a-b7e5-b2ffdade19d7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2226.057750] env[63024]: DEBUG oslo_concurrency.lockutils [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Lock "6917758b-4b68-4a5a-b7e5-b2ffdade19d7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2226.057968] env[63024]: DEBUG oslo_concurrency.lockutils [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Acquiring lock "6917758b-4b68-4a5a-b7e5-b2ffdade19d7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2226.058169] env[63024]: DEBUG oslo_concurrency.lockutils [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Lock "6917758b-4b68-4a5a-b7e5-b2ffdade19d7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2226.058338] env[63024]: DEBUG oslo_concurrency.lockutils [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Lock "6917758b-4b68-4a5a-b7e5-b2ffdade19d7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2226.060422] env[63024]: INFO nova.compute.manager [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 
tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Terminating instance [ 2226.274874] env[63024]: DEBUG nova.compute.manager [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2226.308501] env[63024]: DEBUG nova.virt.hardware [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2226.308753] env[63024]: DEBUG nova.virt.hardware [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2226.308912] env[63024]: DEBUG nova.virt.hardware [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2226.309119] env[63024]: DEBUG nova.virt.hardware [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2226.309272] env[63024]: DEBUG nova.virt.hardware [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2226.309424] env[63024]: DEBUG nova.virt.hardware [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2226.309632] env[63024]: DEBUG nova.virt.hardware [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 2226.309793] env[63024]: DEBUG nova.virt.hardware [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2226.309958] env[63024]: DEBUG nova.virt.hardware [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2226.310133] env[63024]: DEBUG nova.virt.hardware [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2226.310309] env[63024]: DEBUG nova.virt.hardware [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2226.311231] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea73cd5-b89c-4a21-9769-e2fc445aa1b4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.322459] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4112709b-8c28-48ff-ba35-ca248dc9049f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.372856] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4e06917-c9b6-4213-a6ba-36b4850a2e9b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.380484] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8bcd2c6-47ef-4d99-81d3-36a0e01e7a63 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.410110] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002add06-c2a1-4703-8faf-3966e5527b34 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.417356] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7997d0f-d0f0-4bcf-89a9-6301bf614a76 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.430165] env[63024]: DEBUG nova.compute.provider_tree [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2226.565080] env[63024]: DEBUG nova.compute.manager [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 
tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2226.565775] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2226.566255] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3e5532c-0d46-49db-96d6-3f5ae63353da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.574391] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2226.574658] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd630462-eb4e-4dd0-be30-d9de1267110e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2226.581762] env[63024]: DEBUG oslo_vmware.api [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Waiting for the task: (returnval){ [ 2226.581762] env[63024]: value = "task-1951957" [ 2226.581762] env[63024]: _type = "Task" [ 2226.581762] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2226.587648] env[63024]: DEBUG nova.compute.manager [req-51855525-3dcc-4ba3-a387-00c87fc1349b req-d4069635-62cd-40a0-bddf-5bc3236c9b2f service nova] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Received event network-vif-plugged-776a40cb-b4fc-40ff-9fda-a77dca4c0001 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2226.587878] env[63024]: DEBUG oslo_concurrency.lockutils [req-51855525-3dcc-4ba3-a387-00c87fc1349b req-d4069635-62cd-40a0-bddf-5bc3236c9b2f service nova] Acquiring lock "1013a279-f79d-467e-a37e-7e66f77db625-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2226.588097] env[63024]: DEBUG oslo_concurrency.lockutils [req-51855525-3dcc-4ba3-a387-00c87fc1349b req-d4069635-62cd-40a0-bddf-5bc3236c9b2f service nova] Lock "1013a279-f79d-467e-a37e-7e66f77db625-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2226.588286] env[63024]: DEBUG oslo_concurrency.lockutils [req-51855525-3dcc-4ba3-a387-00c87fc1349b req-d4069635-62cd-40a0-bddf-5bc3236c9b2f service nova] Lock "1013a279-f79d-467e-a37e-7e66f77db625-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2226.588383] env[63024]: DEBUG nova.compute.manager [req-51855525-3dcc-4ba3-a387-00c87fc1349b req-d4069635-62cd-40a0-bddf-5bc3236c9b2f service nova] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] No waiting events found dispatching network-vif-plugged-776a40cb-b4fc-40ff-9fda-a77dca4c0001 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2226.588545] env[63024]: WARNING nova.compute.manager [req-51855525-3dcc-4ba3-a387-00c87fc1349b req-d4069635-62cd-40a0-bddf-5bc3236c9b2f service nova] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Received unexpected event network-vif-plugged-776a40cb-b4fc-40ff-9fda-a77dca4c0001 for instance with vm_state building and task_state spawning. [ 2226.591972] env[63024]: DEBUG oslo_vmware.api [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951957, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2226.688484] env[63024]: DEBUG nova.network.neutron [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Successfully updated port: 776a40cb-b4fc-40ff-9fda-a77dca4c0001 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2226.933145] env[63024]: DEBUG nova.scheduler.client.report [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2227.091763] env[63024]: DEBUG oslo_vmware.api [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951957, 'name': PowerOffVM_Task, 'duration_secs': 0.203156} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.092014] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2227.092193] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2227.092468] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-863326d0-bff1-4427-b7f3-8f3a50656877 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.191391] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2227.191549] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2227.191702] env[63024]: DEBUG nova.network.neutron [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 
tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2227.223456] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2227.223677] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2227.223859] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Deleting the datastore file [datastore1] 6917758b-4b68-4a5a-b7e5-b2ffdade19d7 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2227.224140] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95997601-3797-4452-abf9-948847e0f4ce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2227.231169] env[63024]: DEBUG oslo_vmware.api [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Waiting for the task: (returnval){ [ 2227.231169] env[63024]: value = "task-1951959" [ 2227.231169] env[63024]: _type = "Task" [ 2227.231169] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2227.238534] env[63024]: DEBUG oslo_vmware.api [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951959, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2227.439053] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.188s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2227.465504] env[63024]: INFO nova.network.neutron [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating port 4ba24717-2947-46f0-9df8-733d8b40c345 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2227.720351] env[63024]: DEBUG nova.network.neutron [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2227.741678] env[63024]: DEBUG oslo_vmware.api [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Task: {'id': task-1951959, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.24966} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2227.741975] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2227.742211] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2227.742296] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2227.742453] env[63024]: INFO nova.compute.manager [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Took 1.18 seconds to destroy the instance on the hypervisor. [ 2227.742670] env[63024]: DEBUG oslo.service.loopingcall [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2227.742854] env[63024]: DEBUG nova.compute.manager [-] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2227.742954] env[63024]: DEBUG nova.network.neutron [-] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2227.967243] env[63024]: DEBUG nova.network.neutron [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance_info_cache with network_info: [{"id": "776a40cb-b4fc-40ff-9fda-a77dca4c0001", "address": "fa:16:3e:83:a3:48", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap776a40cb-b4", "ovs_interfaceid": "776a40cb-b4fc-40ff-9fda-a77dca4c0001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2228.469819] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2228.470224] env[63024]: DEBUG nova.compute.manager [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Instance network_info: |[{"id": "776a40cb-b4fc-40ff-9fda-a77dca4c0001", "address": "fa:16:3e:83:a3:48", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap776a40cb-b4", "ovs_interfaceid": "776a40cb-b4fc-40ff-9fda-a77dca4c0001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2228.470623] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:a3:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e66c4ebe-f808-4b34-bdb5-6c45edb1736f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '776a40cb-b4fc-40ff-9fda-a77dca4c0001', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2228.478012] env[63024]: DEBUG oslo.service.loopingcall [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2228.478262] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2228.478497] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b94b807b-0ca9-45d5-80fa-bdc73ce2138b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2228.497836] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2228.497836] env[63024]: value = "task-1951960" [ 2228.497836] env[63024]: _type = "Task" [ 2228.497836] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2228.505557] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951960, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2228.590517] env[63024]: DEBUG nova.network.neutron [-] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2228.689166] env[63024]: DEBUG nova.compute.manager [req-2f4dcd47-3310-4259-b5e5-4bfb005b70da req-95600988-eacc-4ada-8712-358b7c4e0caf service nova] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Received event network-changed-776a40cb-b4fc-40ff-9fda-a77dca4c0001 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2228.689166] env[63024]: DEBUG nova.compute.manager [req-2f4dcd47-3310-4259-b5e5-4bfb005b70da req-95600988-eacc-4ada-8712-358b7c4e0caf service nova] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Refreshing instance network info cache due to event network-changed-776a40cb-b4fc-40ff-9fda-a77dca4c0001. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2228.698820] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f4dcd47-3310-4259-b5e5-4bfb005b70da req-95600988-eacc-4ada-8712-358b7c4e0caf service nova] Acquiring lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2228.698820] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f4dcd47-3310-4259-b5e5-4bfb005b70da req-95600988-eacc-4ada-8712-358b7c4e0caf service nova] Acquired lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2228.698820] env[63024]: DEBUG nova.network.neutron [req-2f4dcd47-3310-4259-b5e5-4bfb005b70da req-95600988-eacc-4ada-8712-358b7c4e0caf service nova] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Refreshing network info cache for port 776a40cb-b4fc-40ff-9fda-a77dca4c0001 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2228.949842] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2228.950031] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2228.950112] env[63024]: DEBUG nova.network.neutron [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2229.007098] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951960, 'name': CreateVM_Task, 'duration_secs': 0.500034} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2229.007280] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2229.008211] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2229.008211] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2229.008466] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2229.008720] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bde84d80-8670-4b19-8f3d-704c4c20d5bb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.013123] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2229.013123] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5286c441-571b-2816-327f-5e36446ec881" [ 2229.013123] env[63024]: _type = "Task" [ 2229.013123] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.020716] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5286c441-571b-2816-327f-5e36446ec881, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.093483] env[63024]: INFO nova.compute.manager [-] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Took 1.35 seconds to deallocate network for instance. [ 2229.378205] env[63024]: DEBUG nova.network.neutron [req-2f4dcd47-3310-4259-b5e5-4bfb005b70da req-95600988-eacc-4ada-8712-358b7c4e0caf service nova] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updated VIF entry in instance network info cache for port 776a40cb-b4fc-40ff-9fda-a77dca4c0001. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2229.378584] env[63024]: DEBUG nova.network.neutron [req-2f4dcd47-3310-4259-b5e5-4bfb005b70da req-95600988-eacc-4ada-8712-358b7c4e0caf service nova] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance_info_cache with network_info: [{"id": "776a40cb-b4fc-40ff-9fda-a77dca4c0001", "address": "fa:16:3e:83:a3:48", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap776a40cb-b4", "ovs_interfaceid": "776a40cb-b4fc-40ff-9fda-a77dca4c0001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2229.523519] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5286c441-571b-2816-327f-5e36446ec881, 'name': SearchDatastore_Task, 'duration_secs': 0.008467} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2229.523829] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2229.524061] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2229.524296] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2229.524445] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2229.524624] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2229.524877] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-288c325d-5316-48bd-8405-eeb17d4f97c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.532858] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2229.533040] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2229.533805] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a779ecd7-66c4-4d85-93f7-5f1ebf9ceae1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2229.538532] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2229.538532] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c5f8aa-71d6-f6b2-87d6-1fd2e156ce03" [ 2229.538532] env[63024]: _type = "Task" [ 2229.538532] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2229.547580] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c5f8aa-71d6-f6b2-87d6-1fd2e156ce03, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2229.600234] env[63024]: DEBUG oslo_concurrency.lockutils [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2229.600488] env[63024]: DEBUG oslo_concurrency.lockutils [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2229.600711] env[63024]: DEBUG nova.objects.instance [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Lazy-loading 'resources' on Instance uuid 6917758b-4b68-4a5a-b7e5-b2ffdade19d7 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2229.646998] env[63024]: DEBUG nova.network.neutron [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating instance_info_cache with network_info: [{"id": "4ba24717-2947-46f0-9df8-733d8b40c345", "address": "fa:16:3e:3b:41:f5", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba24717-29", "ovs_interfaceid": "4ba24717-2947-46f0-9df8-733d8b40c345", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2229.881346] env[63024]: DEBUG oslo_concurrency.lockutils [req-2f4dcd47-3310-4259-b5e5-4bfb005b70da req-95600988-eacc-4ada-8712-358b7c4e0caf service nova] Releasing lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2229.881655] env[63024]: DEBUG nova.compute.manager [req-2f4dcd47-3310-4259-b5e5-4bfb005b70da req-95600988-eacc-4ada-8712-358b7c4e0caf service nova] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Received event network-vif-deleted-b28e4901-8be8-43da-a093-669823ad01ad {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2230.049078] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c5f8aa-71d6-f6b2-87d6-1fd2e156ce03, 'name': SearchDatastore_Task, 'duration_secs': 0.007913} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.049890] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de0e5311-822f-4fb3-8e74-abd7bc566ffe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.054935] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2230.054935] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5249c94c-1144-7704-0b12-d2a274ad92d9" [ 2230.054935] env[63024]: _type = "Task" [ 2230.054935] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.062537] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5249c94c-1144-7704-0b12-d2a274ad92d9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.149221] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2230.174998] env[63024]: DEBUG nova.virt.hardware [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='14d8a67d65ba4894c3f6aa94d3f7eb22',container_format='bare',created_at=2024-12-22T11:17:17Z,direct_url=,disk_format='vmdk',id=bca855b4-4ff0-428a-971b-15d087bf0880,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-37877165-shelved',owner='dd0c44555e30414c83750b762e243dc1',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2024-12-22T11:17:31Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2230.175336] env[63024]: DEBUG nova.virt.hardware [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2230.175515] env[63024]: DEBUG nova.virt.hardware [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2230.175703] env[63024]: DEBUG nova.virt.hardware [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2230.175850] env[63024]: DEBUG nova.virt.hardware [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2230.175999] env[63024]: DEBUG nova.virt.hardware [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2230.176221] env[63024]: DEBUG nova.virt.hardware [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2230.176382] env[63024]: DEBUG nova.virt.hardware [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2230.176551] env[63024]: DEBUG nova.virt.hardware [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2230.176724] env[63024]: DEBUG nova.virt.hardware [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2230.176998] env[63024]: DEBUG nova.virt.hardware [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2230.178188] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b95a78fa-8f28-4c6b-89a0-017b140c512f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.181740] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0218dea6-b105-4842-a1b2-e99eca95cb41 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.190882] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e0ce4e-faf5-4225-b49d-4ddeb4790c75 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.194785] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9fe00df-d810-481c-bc58-fe37b7d44024 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.207198] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3b:41:f5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55c757ac-f8b2-466d-b634-07dbd100b312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4ba24717-2947-46f0-9df8-733d8b40c345', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2230.214450] env[63024]: DEBUG oslo.service.loopingcall [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2230.239023] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2230.239464] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70329e50-aaaa-4307-84cf-21b56ada1219 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.254143] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65ef951-cac8-483f-af64-4dac2ee67b93 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.261803] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c263c7b-c1bd-49e3-ad19-17c30099a2c6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.266191] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2230.266191] env[63024]: value = "task-1951961" [ 2230.266191] env[63024]: _type = "Task" [ 2230.266191] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.276699] env[63024]: DEBUG nova.compute.provider_tree [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2230.282931] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951961, 'name': CreateVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.565358] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5249c94c-1144-7704-0b12-d2a274ad92d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009373} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.565754] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2230.565949] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 1013a279-f79d-467e-a37e-7e66f77db625/1013a279-f79d-467e-a37e-7e66f77db625.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2230.566266] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-275f42fc-1ea5-4f59-b6da-48962271cd41 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.573906] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2230.573906] env[63024]: value = "task-1951962" [ 2230.573906] env[63024]: _type = "Task" [ 2230.573906] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.581758] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951962, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2230.714413] env[63024]: DEBUG nova.compute.manager [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Received event network-vif-plugged-4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2230.714823] env[63024]: DEBUG oslo_concurrency.lockutils [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] Acquiring lock "14bafeba-9f5b-4488-b29c-38939973deb9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2230.715220] env[63024]: DEBUG oslo_concurrency.lockutils [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] Lock "14bafeba-9f5b-4488-b29c-38939973deb9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2230.715552] env[63024]: DEBUG oslo_concurrency.lockutils [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] Lock "14bafeba-9f5b-4488-b29c-38939973deb9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2230.715850] env[63024]: DEBUG nova.compute.manager [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] No waiting events found dispatching network-vif-plugged-4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2230.716171] env[63024]: WARNING nova.compute.manager [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Received unexpected event network-vif-plugged-4ba24717-2947-46f0-9df8-733d8b40c345 for instance with vm_state shelved_offloaded and task_state spawning. [ 2230.716489] env[63024]: DEBUG nova.compute.manager [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Received event network-changed-4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2230.716785] env[63024]: DEBUG nova.compute.manager [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Refreshing instance network info cache due to event network-changed-4ba24717-2947-46f0-9df8-733d8b40c345. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2230.717121] env[63024]: DEBUG oslo_concurrency.lockutils [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] Acquiring lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2230.717386] env[63024]: DEBUG oslo_concurrency.lockutils [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] Acquired lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2230.717669] env[63024]: DEBUG nova.network.neutron [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Refreshing network info cache for port 4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2230.777541] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1951961, 'name': CreateVM_Task, 'duration_secs': 0.434162} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2230.777718] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2230.778406] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2230.778576] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2230.778980] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2230.779846] env[63024]: DEBUG nova.scheduler.client.report [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2230.782814] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6137096-a360-4edb-9c5e-d1b3cc7eb64b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2230.789652] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2230.789652] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e3c7f8-4640-2c6f-0cdb-711261ddabf6" [ 2230.789652] env[63024]: _type = "Task" [ 2230.789652] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2230.797925] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e3c7f8-4640-2c6f-0cdb-711261ddabf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2231.083872] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951962, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.462809} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2231.084147] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 1013a279-f79d-467e-a37e-7e66f77db625/1013a279-f79d-467e-a37e-7e66f77db625.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2231.084364] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2231.084611] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-21e66b75-4025-41d8-8a4e-7c25314efd26 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.091792] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2231.091792] env[63024]: value = "task-1951963" [ 2231.091792] env[63024]: _type = "Task" [ 2231.091792] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2231.098876] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951963, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2231.285552] env[63024]: DEBUG oslo_concurrency.lockutils [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.685s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.300961] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2231.301236] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Processing image bca855b4-4ff0-428a-971b-15d087bf0880 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2231.301467] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880/bca855b4-4ff0-428a-971b-15d087bf0880.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2231.301616] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880/bca855b4-4ff0-428a-971b-15d087bf0880.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2231.301792] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2231.302049] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59f98e80-4739-4213-8f5f-7279a1818550 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.304424] env[63024]: INFO nova.scheduler.client.report [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Deleted allocations for instance 
6917758b-4b68-4a5a-b7e5-b2ffdade19d7 [ 2231.313761] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2231.314114] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2231.314885] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1043815-3cde-425c-adfe-f57cc29b19c0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.320228] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2231.320228] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ec0db1-1e9e-0ab2-122f-8ce2e8d622e3" [ 2231.320228] env[63024]: _type = "Task" [ 2231.320228] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2231.327892] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ec0db1-1e9e-0ab2-122f-8ce2e8d622e3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2231.463664] env[63024]: DEBUG nova.network.neutron [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updated VIF entry in instance network info cache for port 4ba24717-2947-46f0-9df8-733d8b40c345. 
{{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2231.464018] env[63024]: DEBUG nova.network.neutron [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating instance_info_cache with network_info: [{"id": "4ba24717-2947-46f0-9df8-733d8b40c345", "address": "fa:16:3e:3b:41:f5", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba24717-29", "ovs_interfaceid": "4ba24717-2947-46f0-9df8-733d8b40c345", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2231.601803] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951963, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062134} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2231.602258] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2231.603203] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b430ab5-e094-4ba8-b4c9-127e90cee77c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.624864] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 1013a279-f79d-467e-a37e-7e66f77db625/1013a279-f79d-467e-a37e-7e66f77db625.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2231.625101] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-736981c8-3433-4ad6-bb3e-07fedd02441f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.644476] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2231.644476] env[63024]: value = "task-1951964" [ 2231.644476] env[63024]: _type = "Task" [ 2231.644476] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2231.652006] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951964, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2231.812708] env[63024]: DEBUG oslo_concurrency.lockutils [None req-331980db-3e87-42c5-b6f3-5c09da0c7977 tempest-ServersNegativeTestMultiTenantJSON-2100881307 tempest-ServersNegativeTestMultiTenantJSON-2100881307-project-member] Lock "6917758b-4b68-4a5a-b7e5-b2ffdade19d7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.755s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2231.830687] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Preparing fetch location {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2231.831168] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Fetch image to [datastore1] OSTACK_IMG_b2724391-c407-4ccf-b414-96080ae11c39/OSTACK_IMG_b2724391-c407-4ccf-b414-96080ae11c39.vmdk {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2231.831168] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Downloading stream optimized image bca855b4-4ff0-428a-971b-15d087bf0880 to [datastore1] OSTACK_IMG_b2724391-c407-4ccf-b414-96080ae11c39/OSTACK_IMG_b2724391-c407-4ccf-b414-96080ae11c39.vmdk on the data store datastore1 as vApp {{(pid=63024) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2231.831310] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Downloading image file data bca855b4-4ff0-428a-971b-15d087bf0880 to the ESX as VM named 'OSTACK_IMG_b2724391-c407-4ccf-b414-96080ae11c39' {{(pid=63024) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2231.904737] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2231.904737] env[63024]: value = "resgroup-9" [ 2231.904737] env[63024]: _type = "ResourcePool" [ 2231.904737] env[63024]: }. 
{{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2231.905372] env[63024]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b1de39b4-c466-4291-a125-ee83bdb29b78 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.926141] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lease: (returnval){ [ 2231.926141] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5209aa68-384f-5bb6-21bc-6b0e1d9930e9" [ 2231.926141] env[63024]: _type = "HttpNfcLease" [ 2231.926141] env[63024]: } obtained for vApp import into resource pool (val){ [ 2231.926141] env[63024]: value = "resgroup-9" [ 2231.926141] env[63024]: _type = "ResourcePool" [ 2231.926141] env[63024]: }. {{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2231.926538] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the lease: (returnval){ [ 2231.926538] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5209aa68-384f-5bb6-21bc-6b0e1d9930e9" [ 2231.926538] env[63024]: _type = "HttpNfcLease" [ 2231.926538] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2231.933347] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2231.933347] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5209aa68-384f-5bb6-21bc-6b0e1d9930e9" [ 2231.933347] env[63024]: _type = "HttpNfcLease" [ 2231.933347] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2231.967036] env[63024]: DEBUG oslo_concurrency.lockutils [req-63fdc89e-6c38-4c46-a887-6c619e4b8be8 req-c3ef98de-d9be-4d0f-95ab-e75b41a837e4 service nova] Releasing lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2232.155648] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951964, 'name': ReconfigVM_Task, 'duration_secs': 0.311575} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2232.156087] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 1013a279-f79d-467e-a37e-7e66f77db625/1013a279-f79d-467e-a37e-7e66f77db625.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2232.156834] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-991520e2-901d-4e05-9872-fcd88aa7c5e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.164282] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2232.164282] env[63024]: value = "task-1951966" [ 2232.164282] env[63024]: _type = "Task" [ 2232.164282] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2232.175937] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951966, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2232.435964] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2232.435964] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5209aa68-384f-5bb6-21bc-6b0e1d9930e9" [ 2232.435964] env[63024]: _type = "HttpNfcLease" [ 2232.435964] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2232.674684] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951966, 'name': Rename_Task, 'duration_secs': 0.159399} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2232.675044] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2232.675195] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5258febf-4101-42e2-9070-4e3371e625fb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.681628] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2232.681628] env[63024]: value = "task-1951967" [ 2232.681628] env[63024]: _type = "Task" [ 2232.681628] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2232.688855] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951967, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2232.934520] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2232.934520] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5209aa68-384f-5bb6-21bc-6b0e1d9930e9" [ 2232.934520] env[63024]: _type = "HttpNfcLease" [ 2232.934520] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2233.193859] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951967, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.434727] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2233.434727] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5209aa68-384f-5bb6-21bc-6b0e1d9930e9" [ 2233.434727] env[63024]: _type = "HttpNfcLease" [ 2233.434727] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2233.692189] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951967, 'name': PowerOnVM_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.935660] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2233.935660] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5209aa68-384f-5bb6-21bc-6b0e1d9930e9" [ 2233.935660] env[63024]: _type = "HttpNfcLease" [ 2233.935660] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2233.936427] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2233.936427] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5209aa68-384f-5bb6-21bc-6b0e1d9930e9" [ 2233.936427] env[63024]: _type = "HttpNfcLease" [ 2233.936427] env[63024]: }. {{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2233.936726] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b6751e8-46ea-4277-99c8-981bcb64ec6f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.943983] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f10c14-7792-a75e-ffca-6a069016adc7/disk-0.vmdk from lease info. 
{{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2233.944153] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f10c14-7792-a75e-ffca-6a069016adc7/disk-0.vmdk. {{(pid=63024) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2234.014019] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-693521be-2b4a-4dc6-8f82-18d8bd027e64 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.193282] env[63024]: DEBUG oslo_vmware.api [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951967, 'name': PowerOnVM_Task, 'duration_secs': 1.183495} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2234.194629] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2234.194834] env[63024]: INFO nova.compute.manager [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Took 7.92 seconds to spawn the instance on the hypervisor. [ 2234.195029] env[63024]: DEBUG nova.compute.manager [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2234.195865] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84b4ceb0-c832-4157-ad91-4d5710544f6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.724024] env[63024]: INFO nova.compute.manager [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Took 13.40 seconds to build instance. [ 2235.101787] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Completed reading data from the image iterator. {{(pid=63024) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2235.101787] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f10c14-7792-a75e-ffca-6a069016adc7/disk-0.vmdk. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2235.102404] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1863a635-4fdd-48e1-ae94-727ddd560664 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.109775] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f10c14-7792-a75e-ffca-6a069016adc7/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2235.109775] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f10c14-7792-a75e-ffca-6a069016adc7/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2235.109775] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-76a5f474-052b-4b06-9dbb-1facca1c3f7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.225371] env[63024]: DEBUG oslo_concurrency.lockutils [None req-bf1e5af2-0de3-4086-979b-989c3277e2b7 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "1013a279-f79d-467e-a37e-7e66f77db625" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.928s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2235.625896] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52f10c14-7792-a75e-ffca-6a069016adc7/disk-0.vmdk. 
{{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2235.626137] env[63024]: INFO nova.virt.vmwareapi.images [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Downloaded image file data bca855b4-4ff0-428a-971b-15d087bf0880 [ 2235.626999] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bb2f0a9-e6f4-4b38-a884-2f70c8cc2e60 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.642494] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55f679f4-e954-4c58-84e8-db771a7cd47e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.672750] env[63024]: INFO nova.virt.vmwareapi.images [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] The imported VM was unregistered [ 2235.675358] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Caching image {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2235.675613] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating directory with path [datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880 {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2235.675912] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2377ba4b-49f1-4bb5-89cf-793bcc4ae1be {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.689817] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Created directory with path [datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880 {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2235.690199] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_b2724391-c407-4ccf-b414-96080ae11c39/OSTACK_IMG_b2724391-c407-4ccf-b414-96080ae11c39.vmdk to [datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880/bca855b4-4ff0-428a-971b-15d087bf0880.vmdk. 
{{(pid=63024) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2235.690400] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-4d2e9be6-d401-42ee-88e3-13c3c1e1f753 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.697302] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2235.697302] env[63024]: value = "task-1951969" [ 2235.697302] env[63024]: _type = "Task" [ 2235.697302] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.707388] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951969, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.207457] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951969, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.709666] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951969, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2236.937246] env[63024]: DEBUG nova.compute.manager [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Stashing vm_state: active {{(pid=63024) _prep_resize /opt/stack/nova/nova/compute/manager.py:5954}} [ 2237.211033] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951969, 'name': MoveVirtualDisk_Task} progress is 43%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.460504] env[63024]: DEBUG oslo_concurrency.lockutils [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2237.460861] env[63024]: DEBUG oslo_concurrency.lockutils [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2237.711663] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951969, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2237.965656] env[63024]: INFO nova.compute.claims [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2238.211627] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951969, 'name': MoveVirtualDisk_Task} progress is 91%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2238.471367] env[63024]: INFO nova.compute.resource_tracker [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating resource usage from migration 965540a2-c1cb-4023-a02f-158a849d197f [ 2238.546730] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a424e097-3711-4f6c-829a-b8e17d36ec25 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.554854] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e256b4a9-1583-4cc1-a3b2-490f8e843803 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.602824] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83657dd4-bf87-49c9-a2e9-e8e0d1f039d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.610624] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098c7874-48f3-4e2d-93bc-d808482ddea6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2238.623545] env[63024]: DEBUG nova.compute.provider_tree [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2238.709474] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951969, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.126409] env[63024]: DEBUG nova.scheduler.client.report [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2239.210709] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951969, 'name': MoveVirtualDisk_Task, 'duration_secs': 3.074753} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2239.210988] env[63024]: INFO nova.virt.vmwareapi.ds_util [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_b2724391-c407-4ccf-b414-96080ae11c39/OSTACK_IMG_b2724391-c407-4ccf-b414-96080ae11c39.vmdk to [datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880/bca855b4-4ff0-428a-971b-15d087bf0880.vmdk. [ 2239.211194] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Cleaning up location [datastore1] OSTACK_IMG_b2724391-c407-4ccf-b414-96080ae11c39 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2239.211359] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_b2724391-c407-4ccf-b414-96080ae11c39 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2239.211614] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f12245d-9dc5-416a-afe0-413105d924d4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.217901] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2239.217901] env[63024]: value = "task-1951970" [ 2239.217901] env[63024]: _type = "Task" [ 2239.217901] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2239.225427] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951970, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2239.632324] env[63024]: DEBUG oslo_concurrency.lockutils [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.171s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2239.632543] env[63024]: INFO nova.compute.manager [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Migrating [ 2239.727224] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951970, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.395135} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2239.727473] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2239.727641] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880/bca855b4-4ff0-428a-971b-15d087bf0880.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2239.727880] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880/bca855b4-4ff0-428a-971b-15d087bf0880.vmdk to [datastore1] 14bafeba-9f5b-4488-b29c-38939973deb9/14bafeba-9f5b-4488-b29c-38939973deb9.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2239.728137] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0de7128-41a6-4729-90cf-9a83513b498f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2239.734520] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2239.734520] env[63024]: value = "task-1951971" [ 2239.734520] env[63024]: _type = "Task" [ 2239.734520] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2239.741585] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951971, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.146942] env[63024]: DEBUG oslo_concurrency.lockutils [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2240.147211] env[63024]: DEBUG oslo_concurrency.lockutils [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2240.147267] env[63024]: DEBUG nova.network.neutron [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2240.245776] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951971, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.748163] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951971, 'name': CopyVirtualDisk_Task} progress is 43%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2240.862863] env[63024]: DEBUG nova.network.neutron [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance_info_cache with network_info: [{"id": "776a40cb-b4fc-40ff-9fda-a77dca4c0001", "address": "fa:16:3e:83:a3:48", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap776a40cb-b4", "ovs_interfaceid": "776a40cb-b4fc-40ff-9fda-a77dca4c0001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2241.248605] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951971, 'name': CopyVirtualDisk_Task} progress is 66%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2241.365663] env[63024]: DEBUG oslo_concurrency.lockutils [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2241.750678] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951971, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.251733] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951971, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.288619} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2242.252089] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/bca855b4-4ff0-428a-971b-15d087bf0880/bca855b4-4ff0-428a-971b-15d087bf0880.vmdk to [datastore1] 14bafeba-9f5b-4488-b29c-38939973deb9/14bafeba-9f5b-4488-b29c-38939973deb9.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2242.253122] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078f94f6-aaab-49dd-be65-7f4ca06a0d07 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.282217] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 14bafeba-9f5b-4488-b29c-38939973deb9/14bafeba-9f5b-4488-b29c-38939973deb9.vmdk or device None with type streamOptimized {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2242.282498] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3661546c-a71e-4ea5-94b8-c0602fd670a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.301644] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2242.301644] env[63024]: value = "task-1951972" [ 2242.301644] env[63024]: _type = "Task" [ 2242.301644] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2242.309371] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951972, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.812157] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951972, 'name': ReconfigVM_Task, 'duration_secs': 0.297728} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2242.812385] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 14bafeba-9f5b-4488-b29c-38939973deb9/14bafeba-9f5b-4488-b29c-38939973deb9.vmdk or device None with type streamOptimized {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2242.813549] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'size': 0, 'device_name': '/dev/sda', 'encryption_secret_uuid': None, 'encrypted': False, 'encryption_options': None, 'boot_index': 0, 'encryption_format': None, 'guest_format': None, 'device_type': 'disk', 'disk_bus': None, 'image_id': '2646ca61-612e-4bc3-97f7-ee492c048835'}], 'ephemerals': [], 'block_device_mapping': [{'guest_format': None, 'attachment_id': '91daad2c-4074-46f9-92ec-9e79126689e3', 'boot_index': None, 'delete_on_termination': False, 'mount_device': '/dev/sdb', 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402275', 'volume_id': '06603432-9319-4172-af00-cbb4469c359f', 'name': 'volume-06603432-9319-4172-af00-cbb4469c359f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '14bafeba-9f5b-4488-b29c-38939973deb9', 'attached_at': '', 'detached_at': '', 'volume_id': '06603432-9319-4172-af00-cbb4469c359f', 'serial': '06603432-9319-4172-af00-cbb4469c359f'}, 'device_type': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=63024) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 2242.813775] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Volume attach. 
Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2242.813964] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402275', 'volume_id': '06603432-9319-4172-af00-cbb4469c359f', 'name': 'volume-06603432-9319-4172-af00-cbb4469c359f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '14bafeba-9f5b-4488-b29c-38939973deb9', 'attached_at': '', 'detached_at': '', 'volume_id': '06603432-9319-4172-af00-cbb4469c359f', 'serial': '06603432-9319-4172-af00-cbb4469c359f'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2242.814713] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec601740-68f9-4aec-8e99-ae5228339985 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.829772] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653c1f2d-4d85-4923-a3bd-5d93b9e08614 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.854178] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] volume-06603432-9319-4172-af00-cbb4469c359f/volume-06603432-9319-4172-af00-cbb4469c359f.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2242.854332] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-150bf3d9-3741-46a2-a35f-75d06d261607 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.872388] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2242.872388] env[63024]: value = "task-1951973" [ 2242.872388] env[63024]: _type = "Task" [ 2242.872388] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2242.881150] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951973, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2242.882202] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38ce83cc-923b-43b8-b669-372ee8dcf721 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2242.898806] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance '1013a279-f79d-467e-a37e-7e66f77db625' progress to 0 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2243.382428] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951973, 'name': ReconfigVM_Task, 'duration_secs': 0.294542} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2243.382752] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Reconfigured VM instance instance-00000073 to attach disk [datastore1] volume-06603432-9319-4172-af00-cbb4469c359f/volume-06603432-9319-4172-af00-cbb4469c359f.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2243.387591] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17273c27-2297-4cf0-ace3-70c092d46856 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.404295] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2243.404642] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2243.404642] env[63024]: value = "task-1951974" [ 2243.404642] env[63024]: _type = "Task" [ 2243.404642] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.404846] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b341ecd5-681b-4e22-872d-b2b3a3d509d4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.415224] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951974, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.416480] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2243.416480] env[63024]: value = "task-1951975" [ 2243.416480] env[63024]: _type = "Task" [ 2243.416480] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.424421] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951975, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2243.916270] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951974, 'name': ReconfigVM_Task, 'duration_secs': 0.190053} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2243.916591] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402275', 'volume_id': '06603432-9319-4172-af00-cbb4469c359f', 'name': 'volume-06603432-9319-4172-af00-cbb4469c359f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '14bafeba-9f5b-4488-b29c-38939973deb9', 'attached_at': '', 'detached_at': '', 'volume_id': '06603432-9319-4172-af00-cbb4469c359f', 'serial': '06603432-9319-4172-af00-cbb4469c359f'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2243.917236] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-083ce620-fd88-4018-8dfb-46e1f3cdb5f3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.926099] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951975, 'name': PowerOffVM_Task, 'duration_secs': 0.235356} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2243.927151] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2243.927344] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance '1013a279-f79d-467e-a37e-7e66f77db625' progress to 17 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2243.930582] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2243.930582] env[63024]: value = "task-1951976" [ 2243.930582] env[63024]: _type = "Task" [ 2243.930582] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2243.937682] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951976, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.433460] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:45Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2244.433884] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2244.433884] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2244.434082] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2244.434233] env[63024]: DEBUG 
nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2244.434384] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2244.434597] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2244.434758] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2244.434924] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2244.435105] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2244.435280] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2244.440230] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-74b0e02d-0b33-4e59-a9c4-e0ca91082e1e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.458326] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951976, 'name': Rename_Task, 'duration_secs': 0.147896} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.459499] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2244.459797] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2244.459797] env[63024]: value = "task-1951977" [ 2244.459797] env[63024]: _type = "Task" [ 2244.459797] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.459975] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3aef8df1-1517-4314-9a70-ed9c7b3c9f59 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.468582] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951977, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.469657] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2244.469657] env[63024]: value = "task-1951978" [ 2244.469657] env[63024]: _type = "Task" [ 2244.469657] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2244.476621] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951978, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2244.629565] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "5919cc21-67b8-47d4-9909-bc972b42914d" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2244.629807] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "5919cc21-67b8-47d4-9909-bc972b42914d" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2244.971391] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951977, 'name': ReconfigVM_Task, 'duration_secs': 0.168124} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2244.974293] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance '1013a279-f79d-467e-a37e-7e66f77db625' progress to 33 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2244.982374] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951978, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.135054] env[63024]: DEBUG nova.compute.utils [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2245.481969] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2245.482470] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2245.482470] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2245.482550] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2245.482680] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2245.482828] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2245.483042] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2245.483204] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2245.483374] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2245.483533] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2245.483707] env[63024]: DEBUG nova.virt.hardware [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2245.488756] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Reconfiguring VM instance instance-0000007a to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2245.489035] env[63024]: DEBUG oslo_vmware.api [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1951978, 'name': PowerOnVM_Task, 'duration_secs': 0.975343} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.489249] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49e7d8e3-1134-4eeb-8188-dd0ce4ae1fbc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.501745] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2245.509076] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2245.509076] env[63024]: value = "task-1951979" [ 2245.509076] env[63024]: _type = "Task" [ 2245.509076] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.516788] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951979, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.600119] env[63024]: DEBUG nova.compute.manager [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2245.601056] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-876053aa-289f-4e59-b813-9cd189a5b479 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.638483] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "5919cc21-67b8-47d4-9909-bc972b42914d" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.020471] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951979, 'name': ReconfigVM_Task, 'duration_secs': 0.163063} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2246.020898] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Reconfigured VM instance instance-0000007a to detach disk 2000 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2246.022041] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66a4ac41-2a81-41e0-9124-b39aff183c6c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.043722] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] 1013a279-f79d-467e-a37e-7e66f77db625/1013a279-f79d-467e-a37e-7e66f77db625.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2246.043947] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5081c6f7-e1a1-44bf-95a5-22d7659c2730 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.061931] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2246.061931] env[63024]: value = "task-1951980" [ 2246.061931] env[63024]: _type = "Task" [ 2246.061931] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2246.069230] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951980, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.117715] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bef7887-8158-4338-b484-14158eb3b4ba tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "14bafeba-9f5b-4488-b29c-38939973deb9" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 30.915s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2246.572286] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951980, 'name': ReconfigVM_Task, 'duration_secs': 0.232872} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2246.572633] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Reconfigured VM instance instance-0000007a to attach disk [datastore1] 1013a279-f79d-467e-a37e-7e66f77db625/1013a279-f79d-467e-a37e-7e66f77db625.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2246.572918] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance '1013a279-f79d-467e-a37e-7e66f77db625' progress to 50 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2246.695310] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "5919cc21-67b8-47d4-9909-bc972b42914d" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.695548] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "5919cc21-67b8-47d4-9909-bc972b42914d" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.695867] env[63024]: INFO nova.compute.manager [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Attaching volume 438dbce6-3775-4c10-8973-59ce0ebb3901 to /dev/sdb [ 2246.726846] env[63024]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a940fd33-8209-4c74-bca9-9537938429d6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.734193] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6303b655-8166-4e7d-9b0b-43fd3a86e410 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.738110] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e659f700-6547-479e-9c4f-e93700e136ae tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "8901e234-22a9-4523-8658-411aa19e01e0" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2246.738346] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e659f700-6547-479e-9c4f-e93700e136ae tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "8901e234-22a9-4523-8658-411aa19e01e0" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2246.738524] env[63024]: DEBUG nova.compute.manager [None req-e659f700-6547-479e-9c4f-e93700e136ae tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2246.739227] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a19a45-11ce-4277-9837-b758178f3da6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2246.747118] env[63024]: DEBUG nova.virt.block_device [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Updating existing volume attachment record: c61acfd0-a2de-4cb1-b46a-60014f6d03f0 {{(pid=63024) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 2246.750186] env[63024]: DEBUG nova.compute.manager [None req-e659f700-6547-479e-9c4f-e93700e136ae tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=63024) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 2246.750700] env[63024]: DEBUG nova.objects.instance [None req-e659f700-6547-479e-9c4f-e93700e136ae tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lazy-loading 'flavor' on Instance uuid 8901e234-22a9-4523-8658-411aa19e01e0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2247.079588] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cabaf8bd-d586-4724-a3f4-5332f71cb148 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.099012] env[63024]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0301de7c-ca1e-45a9-ae8a-5a11f6571f0a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.115690] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance '1013a279-f79d-467e-a37e-7e66f77db625' progress to 67 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2247.654993] env[63024]: DEBUG nova.network.neutron [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Port 776a40cb-b4fc-40ff-9fda-a77dca4c0001 binding to destination host cpu-1 is already ACTIVE {{(pid=63024) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 2247.757506] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e659f700-6547-479e-9c4f-e93700e136ae tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2247.757810] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3b4d700-da84-4ac4-89c6-bbe96988677e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2247.765288] env[63024]: DEBUG oslo_vmware.api [None req-e659f700-6547-479e-9c4f-e93700e136ae tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2247.765288] env[63024]: value = "task-1951982" [ 2247.765288] env[63024]: _type = "Task" [ 2247.765288] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2247.773234] env[63024]: DEBUG oslo_vmware.api [None req-e659f700-6547-479e-9c4f-e93700e136ae tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951982, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2248.275302] env[63024]: DEBUG oslo_vmware.api [None req-e659f700-6547-479e-9c4f-e93700e136ae tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951982, 'name': PowerOffVM_Task, 'duration_secs': 0.219767} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2248.275599] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e659f700-6547-479e-9c4f-e93700e136ae tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2248.275807] env[63024]: DEBUG nova.compute.manager [None req-e659f700-6547-479e-9c4f-e93700e136ae tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2248.276587] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071c6810-e2bc-4707-88dd-06bde4f986f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2248.677152] env[63024]: DEBUG oslo_concurrency.lockutils [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "1013a279-f79d-467e-a37e-7e66f77db625-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2248.677441] env[63024]: DEBUG oslo_concurrency.lockutils [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "1013a279-f79d-467e-a37e-7e66f77db625-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2248.677564] env[63024]: DEBUG oslo_concurrency.lockutils [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "1013a279-f79d-467e-a37e-7e66f77db625-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2248.788209] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e659f700-6547-479e-9c4f-e93700e136ae tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "8901e234-22a9-4523-8658-411aa19e01e0" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.050s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2249.093500] env[63024]: DEBUG nova.objects.instance [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lazy-loading 'flavor' on Instance uuid 8901e234-22a9-4523-8658-411aa19e01e0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2249.598608] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2249.598788] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2249.598945] env[63024]: DEBUG nova.network.neutron [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2249.599144] env[63024]: DEBUG nova.objects.instance [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lazy-loading 'info_cache' on Instance uuid 8901e234-22a9-4523-8658-411aa19e01e0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2249.713044] env[63024]: DEBUG oslo_concurrency.lockutils [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2249.713289] env[63024]: DEBUG oslo_concurrency.lockutils [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2249.713410] env[63024]: DEBUG nova.network.neutron [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2250.102730] env[63024]: DEBUG nova.objects.base [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Object Instance<8901e234-22a9-4523-8658-411aa19e01e0> lazy-loaded attributes: flavor,info_cache {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2250.401073] env[63024]: DEBUG nova.network.neutron [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance_info_cache with network_info: [{"id": "776a40cb-b4fc-40ff-9fda-a77dca4c0001", "address": "fa:16:3e:83:a3:48", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap776a40cb-b4", "ovs_interfaceid": "776a40cb-b4fc-40ff-9fda-a77dca4c0001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2250.804146] env[63024]: DEBUG nova.network.neutron [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Updating instance_info_cache with network_info: [{"id": "f00f50c5-72cc-47ac-b97d-c507d47aa150", "address": "fa:16:3e:39:d0:25", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00f50c5-72", "ovs_interfaceid": "f00f50c5-72cc-47ac-b97d-c507d47aa150", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2250.903931] env[63024]: DEBUG oslo_concurrency.lockutils [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2251.291323] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Volume attach. 
Driver type: vmdk {{(pid=63024) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 2251.291616] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402279', 'volume_id': '438dbce6-3775-4c10-8973-59ce0ebb3901', 'name': 'volume-438dbce6-3775-4c10-8973-59ce0ebb3901', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5919cc21-67b8-47d4-9909-bc972b42914d', 'attached_at': '', 'detached_at': '', 'volume_id': '438dbce6-3775-4c10-8973-59ce0ebb3901', 'serial': '438dbce6-3775-4c10-8973-59ce0ebb3901'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 2251.292565] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cf1a5e5-9599-4e34-a04d-225b3935c2a8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.309229] env[63024]: DEBUG oslo_concurrency.lockutils [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2251.311113] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e0096b-ebc3-4a08-9ffe-1fe12a9115b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.336062] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] volume-438dbce6-3775-4c10-8973-59ce0ebb3901/volume-438dbce6-3775-4c10-8973-59ce0ebb3901.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2251.336836] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3bfe4d78-ea62-4d12-9321-0ac54caae34e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.355384] env[63024]: DEBUG oslo_vmware.api [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2251.355384] env[63024]: value = "task-1951984" [ 2251.355384] env[63024]: _type = "Task" [ 2251.355384] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.363521] env[63024]: DEBUG oslo_vmware.api [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951984, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.427758] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990a2a68-812e-4b65-aeb7-f55f40c516ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.446288] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c20850f-8902-4837-ba2f-5b807bcb744c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.453224] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance '1013a279-f79d-467e-a37e-7e66f77db625' progress to 83 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2251.866020] env[63024]: DEBUG oslo_vmware.api [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951984, 'name': ReconfigVM_Task, 'duration_secs': 0.309729} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2251.866434] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Reconfigured VM instance instance-00000075 to attach disk [datastore1] volume-438dbce6-3775-4c10-8973-59ce0ebb3901/volume-438dbce6-3775-4c10-8973-59ce0ebb3901.vmdk or device None with type thin {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2251.871618] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2f5c6525-546e-4d2b-9399-31e8095adfef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.887766] env[63024]: DEBUG oslo_vmware.api [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2251.887766] env[63024]: value = "task-1951985" [ 2251.887766] env[63024]: _type = "Task" [ 2251.887766] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.896207] env[63024]: DEBUG oslo_vmware.api [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951985, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2251.959140] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2251.959399] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1db4b63c-be12-4496-ac54-f5ce83852d11 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2251.967048] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2251.967048] env[63024]: value = "task-1951986" [ 2251.967048] env[63024]: _type = "Task" [ 2251.967048] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2251.974936] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951986, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.318406] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2252.318735] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a664d0be-602b-4231-aa7a-9b89269dab7c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.323376] env[63024]: DEBUG oslo_concurrency.lockutils [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "7d78b891-34c0-46dd-8b0d-ce80517232e1" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2252.323640] env[63024]: DEBUG oslo_concurrency.lockutils [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2252.323814] env[63024]: INFO nova.compute.manager [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Shelving [ 2252.326543] env[63024]: DEBUG oslo_vmware.api [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2252.326543] 
env[63024]: value = "task-1951987" [ 2252.326543] env[63024]: _type = "Task" [ 2252.326543] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2252.334954] env[63024]: DEBUG oslo_vmware.api [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951987, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.398290] env[63024]: DEBUG oslo_vmware.api [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951985, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2252.480294] env[63024]: DEBUG oslo_vmware.api [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1951986, 'name': PowerOnVM_Task, 'duration_secs': 0.367149} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2252.480636] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2252.480923] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-408e75cd-8940-4d4c-8f0f-98a850ea3109 tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance '1013a279-f79d-467e-a37e-7e66f77db625' progress to 100 {{(pid=63024) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 2252.840925] env[63024]: DEBUG oslo_vmware.api [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951987, 'name': PowerOnVM_Task, 'duration_secs': 0.384766} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2252.841158] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2252.841355] env[63024]: DEBUG nova.compute.manager [None req-4139ee0a-1ddd-4e9a-b4e2-401880b428cc tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2252.842124] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-893f41f3-b801-4783-ac09-a6436f19dc31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2252.898479] env[63024]: DEBUG oslo_vmware.api [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951985, 'name': ReconfigVM_Task, 'duration_secs': 0.877143} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2252.898855] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402279', 'volume_id': '438dbce6-3775-4c10-8973-59ce0ebb3901', 'name': 'volume-438dbce6-3775-4c10-8973-59ce0ebb3901', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5919cc21-67b8-47d4-9909-bc972b42914d', 'attached_at': '', 'detached_at': '', 'volume_id': '438dbce6-3775-4c10-8973-59ce0ebb3901', 'serial': '438dbce6-3775-4c10-8973-59ce0ebb3901'} {{(pid=63024) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 2253.338226] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2253.338670] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-22d555cc-91cf-4f46-a3d1-86d7f41f9612 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.347047] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2253.347047] env[63024]: value = "task-1951988" [ 2253.347047] env[63024]: _type = "Task" [ 2253.347047] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2253.356549] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951988, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2253.856043] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951988, 'name': PowerOffVM_Task, 'duration_secs': 0.222974} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2253.856261] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2253.857033] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c6200a-d4e6-4232-a2fb-661ba0b4d078 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.882954] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b60c84-7bcb-408a-8543-93b270e39757 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2253.939265] env[63024]: DEBUG nova.objects.instance [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lazy-loading 'flavor' on Instance uuid 5919cc21-67b8-47d4-9909-bc972b42914d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2254.160784] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-573ab1ff-99d6-45f5-bc64-97e3c8b296a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.167828] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-80972ca8-4ba1-454d-89fb-d5b450360e21 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Suspending the VM {{(pid=63024) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 2254.168094] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-cc4397b4-fce1-4323-b9fa-dbcd0895f2f9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.173791] env[63024]: DEBUG oslo_vmware.api [None req-80972ca8-4ba1-454d-89fb-d5b450360e21 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2254.173791] env[63024]: value = "task-1951989" [ 2254.173791] env[63024]: _type = "Task" [ 2254.173791] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2254.182425] env[63024]: DEBUG oslo_vmware.api [None req-80972ca8-4ba1-454d-89fb-d5b450360e21 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951989, 'name': SuspendVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.396116] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2254.396458] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-ccd1b9c8-4ad5-433d-9fa0-99c645a7552f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2254.405104] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2254.405104] env[63024]: value = "task-1951990" [ 2254.405104] env[63024]: _type = "Task" [ 2254.405104] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2254.414752] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951990, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.445074] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3f55ee8b-a86a-4640-8f48-c508124f1939 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "5919cc21-67b8-47d4-9909-bc972b42914d" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.749s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2254.685927] env[63024]: DEBUG oslo_vmware.api [None req-80972ca8-4ba1-454d-89fb-d5b450360e21 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951989, 'name': SuspendVM_Task} progress is 70%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2254.914963] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951990, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2255.173268] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "1013a279-f79d-467e-a37e-7e66f77db625" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2255.173648] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "1013a279-f79d-467e-a37e-7e66f77db625" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2255.173792] env[63024]: DEBUG nova.compute.manager [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Going to confirm migration 10 {{(pid=63024) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5113}} [ 2255.184541] env[63024]: DEBUG oslo_vmware.api [None req-80972ca8-4ba1-454d-89fb-d5b450360e21 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951989, 'name': SuspendVM_Task, 'duration_secs': 0.634293} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2255.184827] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-80972ca8-4ba1-454d-89fb-d5b450360e21 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Suspended the VM {{(pid=63024) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 2255.185060] env[63024]: DEBUG nova.compute.manager [None req-80972ca8-4ba1-454d-89fb-d5b450360e21 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2255.185869] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-940db186-db4f-4c1d-8d68-a85f2f39fd2b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.199064] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "5919cc21-67b8-47d4-9909-bc972b42914d" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2255.199064] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "5919cc21-67b8-47d4-9909-bc972b42914d" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2255.415660] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951990, 'name': CreateSnapshot_Task, 'duration_secs': 0.758237} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2255.415950] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2255.416662] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-254037b4-ec60-4b7c-89b3-c37c857445b1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.701504] env[63024]: INFO nova.compute.manager [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Detaching volume 438dbce6-3775-4c10-8973-59ce0ebb3901 [ 2255.712929] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2255.713119] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquired lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2255.713291] env[63024]: DEBUG nova.network.neutron [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2255.713466] env[63024]: DEBUG nova.objects.instance [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lazy-loading 'info_cache' on Instance uuid 1013a279-f79d-467e-a37e-7e66f77db625 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2255.730465] env[63024]: INFO nova.virt.block_device [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Attempting to driver detach volume 438dbce6-3775-4c10-8973-59ce0ebb3901 from mountpoint /dev/sdb [ 2255.730684] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2255.730867] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402279', 'volume_id': '438dbce6-3775-4c10-8973-59ce0ebb3901', 'name': 'volume-438dbce6-3775-4c10-8973-59ce0ebb3901', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5919cc21-67b8-47d4-9909-bc972b42914d', 'attached_at': '', 'detached_at': '', 'volume_id': '438dbce6-3775-4c10-8973-59ce0ebb3901', 'serial': '438dbce6-3775-4c10-8973-59ce0ebb3901'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2255.731715] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27836397-d079-4eaf-b1a8-648ee0416af8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.752969] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc04208-de8c-42e4-b4df-63234007be61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.760172] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f95b21-8526-4470-85e1-f02cc2e7a3ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.780824] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687f9c47-1950-4d7f-988d-3e69c868f215 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.795073] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] The volume has not been displaced from its original location: [datastore1] volume-438dbce6-3775-4c10-8973-59ce0ebb3901/volume-438dbce6-3775-4c10-8973-59ce0ebb3901.vmdk. No consolidation needed. 
{{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2255.800077] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Reconfiguring VM instance instance-00000075 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2255.800326] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5aed185-a8ed-44fa-ab77-c0fd6fdc8d43 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.818177] env[63024]: DEBUG oslo_vmware.api [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2255.818177] env[63024]: value = "task-1951991" [ 2255.818177] env[63024]: _type = "Task" [ 2255.818177] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2255.825362] env[63024]: DEBUG oslo_vmware.api [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951991, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2255.933140] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2255.933462] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-a94b6b36-b742-4935-ae60-d29a3cfaebe5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2255.942138] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2255.942138] env[63024]: value = "task-1951992" [ 2255.942138] env[63024]: _type = "Task" [ 2255.942138] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2255.949922] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951992, 'name': CloneVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2256.333582] env[63024]: DEBUG oslo_vmware.api [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951991, 'name': ReconfigVM_Task, 'duration_secs': 0.201337} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2256.333973] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Reconfigured VM instance instance-00000075 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2256.341900] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-31befc26-91a4-42d5-93df-c7ab63510f5f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2256.365836] env[63024]: DEBUG oslo_vmware.api [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2256.365836] env[63024]: value = "task-1951993" [ 2256.365836] env[63024]: _type = "Task" [ 2256.365836] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2256.377372] env[63024]: DEBUG oslo_vmware.api [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951993, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2256.452103] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951992, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2256.510387] env[63024]: INFO nova.compute.manager [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Resuming [ 2256.511105] env[63024]: DEBUG nova.objects.instance [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lazy-loading 'flavor' on Instance uuid 8901e234-22a9-4523-8658-411aa19e01e0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2256.875276] env[63024]: DEBUG oslo_vmware.api [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951993, 'name': ReconfigVM_Task, 'duration_secs': 0.131405} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2256.875593] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402279', 'volume_id': '438dbce6-3775-4c10-8973-59ce0ebb3901', 'name': 'volume-438dbce6-3775-4c10-8973-59ce0ebb3901', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5919cc21-67b8-47d4-9909-bc972b42914d', 'attached_at': '', 'detached_at': '', 'volume_id': '438dbce6-3775-4c10-8973-59ce0ebb3901', 'serial': '438dbce6-3775-4c10-8973-59ce0ebb3901'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2256.916478] env[63024]: DEBUG nova.network.neutron [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance_info_cache with network_info: [{"id": "776a40cb-b4fc-40ff-9fda-a77dca4c0001", "address": "fa:16:3e:83:a3:48", "network": {"id": "c25a78c5-2c6d-4c87-8575-9620fbc983bd", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1693993539-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "93098ad83ae144bf90a12c97ec863c06", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e66c4ebe-f808-4b34-bdb5-6c45edb1736f", "external-id": "cl2-zone-719", "segmentation_id": 719, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap776a40cb-b4", "ovs_interfaceid": "776a40cb-b4fc-40ff-9fda-a77dca4c0001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2256.953206] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951992, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2257.416767] env[63024]: DEBUG nova.objects.instance [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lazy-loading 'flavor' on Instance uuid 5919cc21-67b8-47d4-9909-bc972b42914d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2257.418823] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Releasing lock "refresh_cache-1013a279-f79d-467e-a37e-7e66f77db625" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2257.419061] env[63024]: DEBUG nova.objects.instance [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lazy-loading 'migration_context' on Instance uuid 1013a279-f79d-467e-a37e-7e66f77db625 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2257.453884] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1951992, 'name': CloneVM_Task, 'duration_secs': 1.081683} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2257.454141] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Created linked-clone VM from snapshot [ 2257.454853] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67319e2-c2c3-4133-ad5a-bd3e073a7a26 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.462212] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Uploading image 6367336c-9163-4307-b69a-e0e54fef2edf {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2257.490865] env[63024]: DEBUG oslo_vmware.rw_handles [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2257.490865] env[63024]: value = "vm-402281" [ 2257.490865] env[63024]: _type = "VirtualMachine" [ 2257.490865] env[63024]: }. 
{{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2257.491157] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-47fc3d66-774c-41c3-adbb-d893ea935fa3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.499930] env[63024]: DEBUG oslo_vmware.rw_handles [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lease: (returnval){ [ 2257.499930] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529cd730-16c8-3a72-911d-57396d28fda3" [ 2257.499930] env[63024]: _type = "HttpNfcLease" [ 2257.499930] env[63024]: } obtained for exporting VM: (result){ [ 2257.499930] env[63024]: value = "vm-402281" [ 2257.499930] env[63024]: _type = "VirtualMachine" [ 2257.499930] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2257.499930] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the lease: (returnval){ [ 2257.499930] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529cd730-16c8-3a72-911d-57396d28fda3" [ 2257.499930] env[63024]: _type = "HttpNfcLease" [ 2257.499930] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2257.504700] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2257.504700] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529cd730-16c8-3a72-911d-57396d28fda3" [ 2257.504700] env[63024]: _type = "HttpNfcLease" [ 2257.504700] env[63024]: } is initializing. 
{{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2257.518068] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2257.518068] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquired lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2257.518068] env[63024]: DEBUG nova.network.neutron [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2257.921686] env[63024]: DEBUG nova.objects.base [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Object Instance<1013a279-f79d-467e-a37e-7e66f77db625> lazy-loaded attributes: info_cache,migration_context {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2257.923798] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a059837-2c88-4331-aa84-e25dfe68a2f3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.944284] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed8fae21-59c0-4dd8-bd6a-fd3fb35266e4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2257.949840] env[63024]: DEBUG oslo_vmware.api [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2257.949840] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ef9aab-c973-37da-fd5a-bed6f4f77024" [ 2257.949840] env[63024]: _type = "Task" [ 2257.949840] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2257.958584] env[63024]: DEBUG oslo_vmware.api [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ef9aab-c973-37da-fd5a-bed6f4f77024, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2258.006203] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2258.006203] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529cd730-16c8-3a72-911d-57396d28fda3" [ 2258.006203] env[63024]: _type = "HttpNfcLease" [ 2258.006203] env[63024]: } is ready. 
{{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2258.006464] env[63024]: DEBUG oslo_vmware.rw_handles [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2258.006464] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]529cd730-16c8-3a72-911d-57396d28fda3" [ 2258.006464] env[63024]: _type = "HttpNfcLease" [ 2258.006464] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2258.007182] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab8739f-918d-4234-9b74-4e4e1d2c8622 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.014528] env[63024]: DEBUG oslo_vmware.rw_handles [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a351e-e212-59b7-13c4-a360e1e858ae/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2258.014708] env[63024]: DEBUG oslo_vmware.rw_handles [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a351e-e212-59b7-13c4-a360e1e858ae/disk-0.vmdk for reading. {{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2258.103108] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-27213118-c8a1-4642-9154-b929527f905e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.297371] env[63024]: DEBUG nova.network.neutron [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Updating instance_info_cache with network_info: [{"id": "f00f50c5-72cc-47ac-b97d-c507d47aa150", "address": "fa:16:3e:39:d0:25", "network": {"id": "0cbe22f7-6322-4d92-9a77-2753f6449a2d", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-366684254-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "6072e8931d9540ad8fe4a2b4b1ec782d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f3ccbdbb-8b49-4a26-913f-2a448b72280f", "external-id": "nsx-vlan-transportzone-412", "segmentation_id": 412, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf00f50c5-72", "ovs_interfaceid": "f00f50c5-72cc-47ac-b97d-c507d47aa150", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2258.427621] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5f19f009-a830-4b7e-bbae-ab32efe4cac2 tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "5919cc21-67b8-47d4-9909-bc972b42914d" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.228s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2258.460492] env[63024]: DEBUG oslo_vmware.api [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52ef9aab-c973-37da-fd5a-bed6f4f77024, 'name': SearchDatastore_Task, 'duration_secs': 0.008077} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2258.461060] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2258.461481] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2258.800128] env[63024]: DEBUG oslo_concurrency.lockutils [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Releasing lock "refresh_cache-8901e234-22a9-4523-8658-411aa19e01e0" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2258.801448] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab14ef2f-aa4d-4a1f-81a5-949a30042804 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.808596] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Resuming the VM {{(pid=63024) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 2258.808978] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b508643-d591-4f60-85ad-655288896bef {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2258.816391] env[63024]: DEBUG oslo_vmware.api [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2258.816391] env[63024]: value = "task-1951995" [ 2258.816391] env[63024]: _type = "Task" [ 
2258.816391] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2258.825203] env[63024]: DEBUG oslo_vmware.api [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951995, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2259.060541] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a2c1cc2-5147-4a56-bd80-0bdab39504ad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.069043] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3091b341-0cd9-4f5a-9af0-38c1313da42d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.102459] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0069a92-e3a0-4bc3-80fd-be1c9325de12 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.110488] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9543a4-5fc5-436e-b5e4-3d4f39801ddd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.124873] env[63024]: DEBUG nova.compute.provider_tree [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2259.326246] env[63024]: DEBUG oslo_vmware.api [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951995, 'name': PowerOnVM_Task} progress is 93%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2259.456796] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "5919cc21-67b8-47d4-9909-bc972b42914d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2259.457242] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "5919cc21-67b8-47d4-9909-bc972b42914d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2259.457650] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "5919cc21-67b8-47d4-9909-bc972b42914d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2259.457929] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "5919cc21-67b8-47d4-9909-bc972b42914d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2259.458325] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "5919cc21-67b8-47d4-9909-bc972b42914d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2259.460567] env[63024]: INFO nova.compute.manager [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Terminating instance [ 2259.646885] env[63024]: ERROR nova.scheduler.client.report [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [req-eac07874-fbc7-47ce-a8c7-634ea9649e41] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-eac07874-fbc7-47ce-a8c7-634ea9649e41"}]} [ 2259.664052] env[63024]: DEBUG nova.scheduler.client.report [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 2259.682023] env[63024]: DEBUG nova.scheduler.client.report [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 2259.682428] env[63024]: DEBUG nova.compute.provider_tree [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2259.695269] env[63024]: DEBUG nova.scheduler.client.report [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 2259.715623] env[63024]: DEBUG nova.scheduler.client.report [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 2259.829336] env[63024]: DEBUG oslo_vmware.api [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951995, 'name': PowerOnVM_Task, 'duration_secs': 0.698376} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2259.829718] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Resumed the VM {{(pid=63024) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 2259.829955] env[63024]: DEBUG nova.compute.manager [None req-8f3643dd-0d59-4d50-9e43-f60e49f5fb92 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2259.830841] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70bd29ff-fc0a-4d89-bac1-553a942f1476 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.889615] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-935ebe61-4c80-4040-8fcb-30154764d80b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.898759] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66217210-b711-443e-968f-7bb88c70622a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.932788] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede840f3-9c7e-4e8c-9361-385c4f77a58a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.940826] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5da2569-8f5b-4de3-9a01-5ced26d77901 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.955256] env[63024]: DEBUG nova.compute.provider_tree [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2259.964345] env[63024]: DEBUG nova.compute.manager [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2259.964552] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2259.965326] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8b434f-c760-41ee-85fe-47c8c542e5e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.973374] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2259.973692] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-832e735d-a48a-4c57-8232-905261688ac1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2259.984440] env[63024]: DEBUG oslo_vmware.api [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2259.984440] env[63024]: value = "task-1951996" [ 2259.984440] env[63024]: _type = "Task" [ 2259.984440] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2259.992236] env[63024]: DEBUG oslo_vmware.api [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951996, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2260.485981] env[63024]: DEBUG nova.scheduler.client.report [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 191 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2260.486332] env[63024]: DEBUG nova.compute.provider_tree [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 191 to 192 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2260.486518] env[63024]: DEBUG nova.compute.provider_tree [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2260.499247] env[63024]: DEBUG oslo_vmware.api [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951996, 'name': PowerOffVM_Task, 'duration_secs': 0.204734} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2260.499494] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2260.499662] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2260.499912] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-400c6e45-2e93-40c7-ab7f-d507e69890b8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.206569] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "8901e234-22a9-4523-8658-411aa19e01e0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2261.206891] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "8901e234-22a9-4523-8658-411aa19e01e0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2261.207118] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "8901e234-22a9-4523-8658-411aa19e01e0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2261.207306] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "8901e234-22a9-4523-8658-411aa19e01e0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2261.207484] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "8901e234-22a9-4523-8658-411aa19e01e0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2261.209666] env[63024]: INFO nova.compute.manager [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 
tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Terminating instance [ 2261.232818] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2261.233063] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2261.233252] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Deleting the datastore file [datastore1] 5919cc21-67b8-47d4-9909-bc972b42914d {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2261.233520] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-003ca92e-204f-46fb-aa9d-3e20b8ef1cf8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.241458] env[63024]: DEBUG oslo_vmware.api [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for the task: (returnval){ [ 2261.241458] env[63024]: value = "task-1951998" [ 2261.241458] env[63024]: _type = "Task" [ 2261.241458] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2261.249553] env[63024]: DEBUG oslo_vmware.api [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951998, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2261.498904] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.037s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2261.713398] env[63024]: DEBUG nova.compute.manager [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2261.713719] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2261.714708] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5e2bb30-9f37-4b93-9757-f39888fdfb8f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.722671] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2261.722920] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-85db0b96-d065-4bac-820a-f3943682dda8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2261.729904] env[63024]: DEBUG oslo_vmware.api [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2261.729904] env[63024]: value = "task-1951999" [ 2261.729904] env[63024]: _type = "Task" [ 2261.729904] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2261.738396] env[63024]: DEBUG oslo_vmware.api [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951999, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2261.751869] env[63024]: DEBUG oslo_vmware.api [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Task: {'id': task-1951998, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221195} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2261.752151] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2261.752365] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2261.752559] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2261.752759] env[63024]: INFO nova.compute.manager [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Took 1.79 seconds to destroy the instance on the hypervisor. [ 2261.753071] env[63024]: DEBUG oslo.service.loopingcall [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2261.753302] env[63024]: DEBUG nova.compute.manager [-] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2261.753407] env[63024]: DEBUG nova.network.neutron [-] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2262.066578] env[63024]: INFO nova.scheduler.client.report [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted allocation for migration 965540a2-c1cb-4023-a02f-158a849d197f [ 2262.241604] env[63024]: DEBUG oslo_vmware.api [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1951999, 'name': PowerOffVM_Task, 'duration_secs': 0.210276} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2262.241821] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2262.242241] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2262.244444] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-edb25c7d-02aa-42bd-b8c2-6449694360bd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.344325] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2262.344325] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2262.344325] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleting the datastore file [datastore1] 8901e234-22a9-4523-8658-411aa19e01e0 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2262.344325] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f0e1ba0-b008-4569-a47d-8031242a7fbe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2262.351785] env[63024]: DEBUG oslo_vmware.api [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for the task: (returnval){ [ 2262.351785] env[63024]: value = "task-1952001" [ 2262.351785] env[63024]: _type = "Task" [ 2262.351785] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2262.361779] env[63024]: DEBUG oslo_vmware.api [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1952001, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2262.505456] env[63024]: DEBUG nova.compute.manager [req-b62c8550-c829-485b-81e3-2850e9a21993 req-27929f34-352b-41fc-bf5d-72433fc9c58b service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Received event network-vif-deleted-f3bc6bf4-4559-4a56-b6cb-c71a44d6651b {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2262.505885] env[63024]: INFO nova.compute.manager [req-b62c8550-c829-485b-81e3-2850e9a21993 req-27929f34-352b-41fc-bf5d-72433fc9c58b service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Neutron deleted interface f3bc6bf4-4559-4a56-b6cb-c71a44d6651b; detaching it from the instance and deleting it from the info cache [ 2262.505885] env[63024]: DEBUG nova.network.neutron [req-b62c8550-c829-485b-81e3-2850e9a21993 req-27929f34-352b-41fc-bf5d-72433fc9c58b service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2262.574263] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "1013a279-f79d-467e-a37e-7e66f77db625" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 7.401s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2262.863263] env[63024]: DEBUG oslo_vmware.api [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Task: {'id': task-1952001, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177682} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2262.863616] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2262.863847] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2262.864048] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2262.864282] env[63024]: INFO nova.compute.manager [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Took 1.15 seconds to destroy the instance on the hypervisor. 
[ 2262.864600] env[63024]: DEBUG oslo.service.loopingcall [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2262.864810] env[63024]: DEBUG nova.compute.manager [-] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2262.864909] env[63024]: DEBUG nova.network.neutron [-] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2262.984392] env[63024]: DEBUG nova.network.neutron [-] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2263.008381] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bdd0d7ed-e3fd-4d33-9f2f-9af99c2ab8c3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.029215] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5512c73e-38fd-4946-8564-2af3f4ca6862 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2263.853131] env[63024]: INFO nova.compute.manager [-] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Took 2.10 seconds to deallocate network for instance. [ 2263.853827] env[63024]: DEBUG nova.compute.manager [req-b62c8550-c829-485b-81e3-2850e9a21993 req-27929f34-352b-41fc-bf5d-72433fc9c58b service nova] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Detach interface failed, port_id=f3bc6bf4-4559-4a56-b6cb-c71a44d6651b, reason: Instance 5919cc21-67b8-47d4-9909-bc972b42914d could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2264.358010] env[63024]: DEBUG nova.network.neutron [-] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2264.361312] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2264.361615] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2264.361875] env[63024]: DEBUG nova.objects.instance [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lazy-loading 'resources' on Instance uuid 5919cc21-67b8-47d4-9909-bc972b42914d {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2264.534125] env[63024]: DEBUG nova.compute.manager [req-b40c27df-ee7d-412c-ae07-c0783adcc2ff req-955b9a59-70aa-475d-b24e-1e176d0a8be8 service nova] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Received event network-vif-deleted-f00f50c5-72cc-47ac-b97d-c507d47aa150 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2264.704348] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "1013a279-f79d-467e-a37e-7e66f77db625" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2264.704621] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "1013a279-f79d-467e-a37e-7e66f77db625" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2264.704704] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "1013a279-f79d-467e-a37e-7e66f77db625-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2264.705105] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "1013a279-f79d-467e-a37e-7e66f77db625-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2264.705105] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "1013a279-f79d-467e-a37e-7e66f77db625-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2264.707430] env[63024]: INFO nova.compute.manager [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Terminating instance [ 2264.861420] env[63024]: INFO nova.compute.manager [-] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Took 2.00 seconds to deallocate network for instance. [ 2264.946627] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4c23318-9735-4e4b-8703-7837a4f366b2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.955224] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f2c141-7227-4d97-8832-442746a4b962 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.987617] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f661d09a-dc2e-4a4c-9da5-b2119609ebb3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2264.995541] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f08941c3-1337-491f-ae8f-6a6270138713 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.010011] env[63024]: DEBUG nova.compute.provider_tree [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2265.211051] env[63024]: DEBUG nova.compute.manager [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2265.211298] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2265.212244] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fde6dce-9433-425e-b56d-5ec3f2a5dd80 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.220644] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2265.220904] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1ebadbf-7a24-4957-ba32-c13f4c26d120 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.227976] env[63024]: DEBUG oslo_vmware.api [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2265.227976] env[63024]: value = "task-1952002" [ 2265.227976] env[63024]: _type = "Task" [ 2265.227976] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.237188] env[63024]: DEBUG oslo_vmware.api [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1952002, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2265.370220] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2265.529507] env[63024]: ERROR nova.scheduler.client.report [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] [req-af7c7f4a-f639-4f77-8418-a21b68a5d156] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 89dfa68a-133e-436f-a9f1-86051f9fb96b. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-af7c7f4a-f639-4f77-8418-a21b68a5d156"}]} [ 2265.545331] env[63024]: DEBUG nova.scheduler.client.report [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Refreshing inventories for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:819}} [ 2265.560030] env[63024]: DEBUG nova.scheduler.client.report [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Updating ProviderTree inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:783}} [ 2265.560030] env[63024]: DEBUG nova.compute.provider_tree [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 170, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2265.570965] env[63024]: DEBUG nova.scheduler.client.report [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Refreshing aggregate associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, aggregates: None {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:828}} [ 2265.589053] env[63024]: DEBUG nova.scheduler.client.report [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Refreshing trait associations for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b, traits: HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NODE,COMPUTE_SAME_HOST_COLD_MIGRATE {{(pid=63024) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:840}} [ 2265.658406] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d32eabd2-fbed-4045-9185-cdd5c875bce6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.666163] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-de9bfee0-7c05-400a-a07f-adb99b9f5336 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.696172] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc10b5ae-6f41-4be4-b9ee-e2bd1e0823a1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.703774] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c229b73-80bb-43c6-86f9-f6e07e234f03 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.719156] env[63024]: DEBUG nova.compute.provider_tree [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2265.737756] env[63024]: DEBUG oslo_vmware.api [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1952002, 'name': PowerOffVM_Task, 'duration_secs': 0.20123} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2265.738167] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2265.738353] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2265.738604] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5c442989-25af-457b-aacd-4fd61a1b0a2d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.939576] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2265.939994] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 
2265.940121] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleting the datastore file [datastore1] 1013a279-f79d-467e-a37e-7e66f77db625 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2265.940513] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-864e48fe-3fee-4efa-92ac-d64fd04a70a2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2265.948535] env[63024]: DEBUG oslo_vmware.api [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for the task: (returnval){ [ 2265.948535] env[63024]: value = "task-1952004" [ 2265.948535] env[63024]: _type = "Task" [ 2265.948535] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2265.956625] env[63024]: DEBUG oslo_vmware.api [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1952004, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.250596] env[63024]: DEBUG nova.scheduler.client.report [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Updated inventory for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with generation 193 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:972}} [ 2266.250997] env[63024]: DEBUG nova.compute.provider_tree [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Updating resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b generation from 193 to 194 during operation: update_inventory {{(pid=63024) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 2266.251267] env[63024]: DEBUG nova.compute.provider_tree [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Updating inventory in ProviderTree for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2266.458564] env[63024]: DEBUG oslo_vmware.api [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 
tempest-DeleteServersTestJSON-7563133-project-member] Task: {'id': task-1952004, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.161536} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2266.458841] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2266.458999] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2266.459206] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2266.459378] env[63024]: INFO nova.compute.manager [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Took 1.25 seconds to destroy the instance on the hypervisor. [ 2266.459615] env[63024]: DEBUG oslo.service.loopingcall [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2266.459844] env[63024]: DEBUG nova.compute.manager [-] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2266.459931] env[63024]: DEBUG nova.network.neutron [-] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2266.557506] env[63024]: DEBUG oslo_vmware.rw_handles [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a351e-e212-59b7-13c4-a360e1e858ae/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2266.558438] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b974dc-ab3b-4c42-b372-702bb555b654 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.564820] env[63024]: DEBUG oslo_vmware.rw_handles [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a351e-e212-59b7-13c4-a360e1e858ae/disk-0.vmdk is in state: ready. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2266.565022] env[63024]: ERROR oslo_vmware.rw_handles [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a351e-e212-59b7-13c4-a360e1e858ae/disk-0.vmdk due to incomplete transfer. [ 2266.565250] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d3454cf3-25ca-49a8-9cbf-d657d41c3d95 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.573976] env[63024]: DEBUG oslo_vmware.rw_handles [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/529a351e-e212-59b7-13c4-a360e1e858ae/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2266.573976] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Uploaded image 6367336c-9163-4307-b69a-e0e54fef2edf to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2266.575805] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2266.576065] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-ceca2b5b-2901-4230-966e-179642bcd793 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2266.583181] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2266.583181] env[63024]: value = "task-1952005" [ 2266.583181] env[63024]: _type = "Task" [ 2266.583181] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2266.591257] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952005, 'name': Destroy_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2266.759645] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.396s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2266.762070] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.392s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2266.762512] env[63024]: DEBUG nova.objects.instance [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lazy-loading 'resources' on Instance uuid 8901e234-22a9-4523-8658-411aa19e01e0 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2266.789803] env[63024]: DEBUG nova.compute.manager [req-94b62e5a-b04d-49c7-b81c-dd979f8239de req-96d8e4cd-2aa2-4cc2-a16e-bd378c4fdf57 service nova] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Received event network-vif-deleted-776a40cb-b4fc-40ff-9fda-a77dca4c0001 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2266.789999] env[63024]: INFO nova.compute.manager [req-94b62e5a-b04d-49c7-b81c-dd979f8239de req-96d8e4cd-2aa2-4cc2-a16e-bd378c4fdf57 service nova] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Neutron deleted interface 776a40cb-b4fc-40ff-9fda-a77dca4c0001; detaching it from the instance and deleting it from the info cache [ 2266.790191] env[63024]: DEBUG nova.network.neutron [req-94b62e5a-b04d-49c7-b81c-dd979f8239de req-96d8e4cd-2aa2-4cc2-a16e-bd378c4fdf57 service nova] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2266.791894] env[63024]: INFO nova.scheduler.client.report [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Deleted allocations for instance 5919cc21-67b8-47d4-9909-bc972b42914d [ 2267.093937] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952005, 'name': Destroy_Task, 'duration_secs': 0.451745} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.094266] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Destroyed the VM [ 2267.094433] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2267.094683] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f81654d8-d1e4-440f-9a08-9a698b297bad {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.100764] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2267.100764] env[63024]: value = "task-1952006" [ 2267.100764] env[63024]: _type = "Task" [ 2267.100764] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2267.108419] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952006, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2267.270199] env[63024]: DEBUG nova.network.neutron [-] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2267.294383] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ad159b05-983f-45c2-b8fe-0a630f07a054 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.304458] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1a8d164b-7765-4de4-ab51-3fc208fdca9d tempest-AttachVolumeNegativeTest-1742414548 tempest-AttachVolumeNegativeTest-1742414548-project-member] Lock "5919cc21-67b8-47d4-9909-bc972b42914d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.847s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2267.311301] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee726cb-63c9-42ec-a0d9-dd092d598f61 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.342857] env[63024]: DEBUG nova.compute.manager [req-94b62e5a-b04d-49c7-b81c-dd979f8239de req-96d8e4cd-2aa2-4cc2-a16e-bd378c4fdf57 service nova] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Detach interface failed, port_id=776a40cb-b4fc-40ff-9fda-a77dca4c0001, reason: Instance 1013a279-f79d-467e-a37e-7e66f77db625 could not be found. 
{{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2267.362845] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee42f17b-058c-46fd-a21f-ec57bbba1c26 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.370277] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1387b0-020d-4d4c-b5df-7b2006efcdd5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.400506] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b02bf08-39d7-4c91-a007-8c4f62196b1f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.408494] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054162a0-dab3-485d-a7a5-5c7eb94596e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.423349] env[63024]: DEBUG nova.compute.provider_tree [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2267.611412] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952006, 'name': RemoveSnapshot_Task, 'duration_secs': 0.35474} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2267.611695] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2267.611977] env[63024]: DEBUG nova.compute.manager [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2267.612750] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb9f3b2-ade3-494c-852a-d33a6ff8e20e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2267.777273] env[63024]: INFO nova.compute.manager [-] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Took 1.31 seconds to deallocate network for instance. 
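The ERROR near the top of this span shows the placement report client hitting a 409 "placement.concurrent_update" generation conflict while PUTting inventory for resource provider 89dfa68a-133e-436f-a9f1-86051f9fb96b; the following DEBUG lines show it refreshing inventories, aggregates and traits and retrying, which moves the provider generation from 193 to 194. A minimal sketch of that optimistic-concurrency retry against the Placement HTTP API is shown below; it is an illustration only, not Nova's actual report client. PLACEMENT_URL, the token header and the helper name set_inventory_with_retry are assumptions for the example.

    # Sketch: retry an inventory PUT when Placement reports a resource-provider
    # generation conflict (409, code "placement.concurrent_update"), as seen above.
    import requests

    PLACEMENT_URL = "http://placement.example/placement"   # assumed endpoint
    HEADERS = {"X-Auth-Token": "TOKEN",                     # assumed credentials
               "OpenStack-API-Version": "placement 1.26"}   # any microversion >= 1.0 works here

    def set_inventory_with_retry(rp_uuid, inventories, attempts=4):
        """PUT the inventory, re-reading the provider generation on 409 conflicts."""
        with requests.Session() as s:
            s.headers.update(HEADERS)
            for _ in range(attempts):
                # Read the current generation; the PUT must echo it back so Placement
                # can detect concurrent updates to the same resource provider.
                rp = s.get(f"{PLACEMENT_URL}/resource_providers/{rp_uuid}")
                rp.raise_for_status()
                payload = {"resource_provider_generation": rp.json()["generation"],
                           "inventories": inventories}
                resp = s.put(f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
                             json=payload)
                if resp.status_code != 409:
                    resp.raise_for_status()
                    return resp.json()      # body carries the new generation
                errors = resp.json().get("errors", [])
                if not any(e.get("code") == "placement.concurrent_update" for e in errors):
                    resp.raise_for_status()  # a different conflict; do not retry
                # Another thread bumped the generation underneath us: loop, re-read
                # the generation and try the PUT again.
            raise RuntimeError("gave up after repeated generation conflicts")

In the log above, Nova's report client additionally refreshes its cached ProviderTree (inventories, aggregate associations and trait associations) before retrying, which is what the intervening "Refreshing ..." and "Updating inventory in ProviderTree ..." DEBUG lines correspond to.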
[ 2267.926483] env[63024]: DEBUG nova.scheduler.client.report [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2268.124510] env[63024]: INFO nova.compute.manager [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Shelve offloading [ 2268.279982] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2268.433022] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.435343] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.156s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2268.435549] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.456525] env[63024]: INFO nova.scheduler.client.report [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Deleted allocations for instance 8901e234-22a9-4523-8658-411aa19e01e0 [ 2268.458149] env[63024]: INFO nova.scheduler.client.report [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Deleted allocations for instance 1013a279-f79d-467e-a37e-7e66f77db625 [ 2268.628531] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Powering off the VM {{(pid=63024) power_off_instance 
/opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2268.628842] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-70e340e7-769e-4ae0-bec9-74b020b8d655 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2268.636172] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2268.636172] env[63024]: value = "task-1952008" [ 2268.636172] env[63024]: _type = "Task" [ 2268.636172] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2268.643568] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952008, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2268.969315] env[63024]: DEBUG oslo_concurrency.lockutils [None req-94d96fec-bfcf-424d-9ebb-c10230e89b50 tempest-ServerActionsTestJSON-64550346 tempest-ServerActionsTestJSON-64550346-project-member] Lock "8901e234-22a9-4523-8658-411aa19e01e0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.762s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2268.970365] env[63024]: DEBUG oslo_concurrency.lockutils [None req-1f4ff60a-b9cf-49b3-b7a4-6725481488ea tempest-DeleteServersTestJSON-7563133 tempest-DeleteServersTestJSON-7563133-project-member] Lock "1013a279-f79d-467e-a37e-7e66f77db625" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.266s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2269.147466] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2269.147790] env[63024]: DEBUG nova.compute.manager [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2269.148655] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f663537-3d90-40c9-a3b5-b3f30c655943 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2269.155320] env[63024]: DEBUG oslo_concurrency.lockutils [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2269.155478] env[63024]: DEBUG oslo_concurrency.lockutils [None req-aa139802-6a26-4200-86a0-f0d39b3be49a 
tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2269.155643] env[63024]: DEBUG nova.network.neutron [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2269.567370] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2269.567626] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2269.567779] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 2269.567898] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 2269.965763] env[63024]: DEBUG nova.network.neutron [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updating instance_info_cache with network_info: [{"id": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "address": "fa:16:3e:74:e2:14", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dc6f8a3-c6", "ovs_interfaceid": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2270.096501] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock 
"refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2270.096659] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquired lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2270.096832] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Forcefully refreshing network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2270.096987] env[63024]: DEBUG nova.objects.instance [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lazy-loading 'info_cache' on Instance uuid 14bafeba-9f5b-4488-b29c-38939973deb9 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2270.468446] env[63024]: DEBUG oslo_concurrency.lockutils [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2270.710028] env[63024]: DEBUG nova.compute.manager [req-a3edfdea-67c9-4b8c-9501-5d4ac58f4f23 req-13f9988d-8309-4b2d-8b0c-21c641640e2f service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Received event network-vif-unplugged-4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2270.710028] env[63024]: DEBUG oslo_concurrency.lockutils [req-a3edfdea-67c9-4b8c-9501-5d4ac58f4f23 req-13f9988d-8309-4b2d-8b0c-21c641640e2f service nova] Acquiring lock "7d78b891-34c0-46dd-8b0d-ce80517232e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2270.710028] env[63024]: DEBUG oslo_concurrency.lockutils [req-a3edfdea-67c9-4b8c-9501-5d4ac58f4f23 req-13f9988d-8309-4b2d-8b0c-21c641640e2f service nova] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2270.710028] env[63024]: DEBUG oslo_concurrency.lockutils [req-a3edfdea-67c9-4b8c-9501-5d4ac58f4f23 req-13f9988d-8309-4b2d-8b0c-21c641640e2f service nova] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2270.710315] env[63024]: DEBUG nova.compute.manager [req-a3edfdea-67c9-4b8c-9501-5d4ac58f4f23 req-13f9988d-8309-4b2d-8b0c-21c641640e2f service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] No waiting events found dispatching network-vif-unplugged-4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2270.710442] env[63024]: WARNING nova.compute.manager [req-a3edfdea-67c9-4b8c-9501-5d4ac58f4f23 req-13f9988d-8309-4b2d-8b0c-21c641640e2f service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] 
Received unexpected event network-vif-unplugged-4dc6f8a3-c6c8-459b-8a05-db8924adb128 for instance with vm_state shelved and task_state shelving_offloading. [ 2270.795734] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2270.796762] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b730c0e-55f9-4272-a8f0-573f843fe362 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.804838] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2270.805112] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13dabdea-9a41-4e03-b384-70a94b688d2c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.980690] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2270.981142] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2270.981619] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleting the datastore file [datastore1] 7d78b891-34c0-46dd-8b0d-ce80517232e1 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2270.981757] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bc0ae05c-b60f-465f-9c27-be55d735d8b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2270.988211] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2270.988211] env[63024]: value = "task-1952011" [ 2270.988211] env[63024]: _type = "Task" [ 2270.988211] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2270.997168] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952011, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2271.497077] env[63024]: DEBUG oslo_vmware.api [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952011, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130539} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2271.497363] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2271.497546] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2271.497721] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2271.518566] env[63024]: INFO nova.scheduler.client.report [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleted allocations for instance 7d78b891-34c0-46dd-8b0d-ce80517232e1 [ 2271.865203] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating instance_info_cache with network_info: [{"id": "4ba24717-2947-46f0-9df8-733d8b40c345", "address": "fa:16:3e:3b:41:f5", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4ba24717-29", "ovs_interfaceid": "4ba24717-2947-46f0-9df8-733d8b40c345", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2272.023544] env[63024]: DEBUG oslo_concurrency.lockutils [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 
tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2272.023915] env[63024]: DEBUG oslo_concurrency.lockutils [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2272.024012] env[63024]: DEBUG nova.objects.instance [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'resources' on Instance uuid 7d78b891-34c0-46dd-8b0d-ce80517232e1 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2272.368248] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Releasing lock "refresh_cache-14bafeba-9f5b-4488-b29c-38939973deb9" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2272.368459] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updated the network info_cache for instance {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10329}} [ 2272.368664] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2272.368817] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2272.369317] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2272.369317] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2272.369317] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2272.369467] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2272.369510] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] 
CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 2272.370512] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2272.526419] env[63024]: DEBUG nova.objects.instance [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'numa_topology' on Instance uuid 7d78b891-34c0-46dd-8b0d-ce80517232e1 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2272.746846] env[63024]: DEBUG nova.compute.manager [req-724b8b9c-ff5c-4012-8cc2-1de0aa0f2bb1 req-7f35dcb0-5f3a-4e78-aa26-f85096e6d358 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Received event network-changed-4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2272.748444] env[63024]: DEBUG nova.compute.manager [req-724b8b9c-ff5c-4012-8cc2-1de0aa0f2bb1 req-7f35dcb0-5f3a-4e78-aa26-f85096e6d358 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Refreshing instance network info cache due to event network-changed-4dc6f8a3-c6c8-459b-8a05-db8924adb128. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2272.748735] env[63024]: DEBUG oslo_concurrency.lockutils [req-724b8b9c-ff5c-4012-8cc2-1de0aa0f2bb1 req-7f35dcb0-5f3a-4e78-aa26-f85096e6d358 service nova] Acquiring lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2272.748928] env[63024]: DEBUG oslo_concurrency.lockutils [req-724b8b9c-ff5c-4012-8cc2-1de0aa0f2bb1 req-7f35dcb0-5f3a-4e78-aa26-f85096e6d358 service nova] Acquired lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2272.749148] env[63024]: DEBUG nova.network.neutron [req-724b8b9c-ff5c-4012-8cc2-1de0aa0f2bb1 req-7f35dcb0-5f3a-4e78-aa26-f85096e6d358 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Refreshing network info cache for port 4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2272.875107] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2273.029254] env[63024]: DEBUG nova.objects.base [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Object Instance<7d78b891-34c0-46dd-8b0d-ce80517232e1> lazy-loaded attributes: resources,numa_topology {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2273.065784] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-611541fa-ce6f-4716-bb43-964a629ff038 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.073995] 
env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641156c8-ddbf-459d-a7d4-d1249df71120 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.104603] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00408d13-0791-4ab7-a589-de027327fadf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.112399] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e6d983-5f47-4bca-a368-8bb8faf00b88 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2273.127230] env[63024]: DEBUG nova.compute.provider_tree [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2273.500193] env[63024]: DEBUG nova.network.neutron [req-724b8b9c-ff5c-4012-8cc2-1de0aa0f2bb1 req-7f35dcb0-5f3a-4e78-aa26-f85096e6d358 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updated VIF entry in instance network info cache for port 4dc6f8a3-c6c8-459b-8a05-db8924adb128. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2273.500575] env[63024]: DEBUG nova.network.neutron [req-724b8b9c-ff5c-4012-8cc2-1de0aa0f2bb1 req-7f35dcb0-5f3a-4e78-aa26-f85096e6d358 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updating instance_info_cache with network_info: [{"id": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "address": "fa:16:3e:74:e2:14", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": null, "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap4dc6f8a3-c6", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2273.630946] env[63024]: DEBUG nova.scheduler.client.report [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2273.688132] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "7d78b891-34c0-46dd-8b0d-ce80517232e1" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2274.003991] env[63024]: DEBUG oslo_concurrency.lockutils [req-724b8b9c-ff5c-4012-8cc2-1de0aa0f2bb1 req-7f35dcb0-5f3a-4e78-aa26-f85096e6d358 service nova] Releasing lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2274.136330] env[63024]: DEBUG oslo_concurrency.lockutils [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.112s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2274.138848] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.264s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2274.139036] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2274.139203] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2274.140318] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f43af57d-3d2e-47ae-a9b3-e1d7f530dd30 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.148491] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-894ffefa-26b6-4f14-93a8-16da8b50dc24 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.162496] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87c7602-192d-4f97-b8b6-40c2f54c7fe3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.169636] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2029d56-5e7a-491e-b54c-25e273311beb {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2274.199672] env[63024]: DEBUG nova.compute.resource_tracker [None 
req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180959MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2274.199758] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2274.199985] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2274.645901] env[63024]: DEBUG oslo_concurrency.lockutils [None req-aa139802-6a26-4200-86a0-f0d39b3be49a tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 22.322s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2274.647061] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 0.959s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2274.647153] env[63024]: INFO nova.compute.manager [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Unshelving [ 2275.128455] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "e79d7141-00e5-40c9-a88f-244a3ae685d8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2275.128752] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "e79d7141-00e5-40c9-a88f-244a3ae685d8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2275.221338] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 14bafeba-9f5b-4488-b29c-38939973deb9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2275.330277] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "2e59f840-26bf-4192-b1ee-3645e9a64d1a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2275.330505] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "2e59f840-26bf-4192-b1ee-3645e9a64d1a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2275.631039] env[63024]: DEBUG nova.compute.manager [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2275.671279] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2275.724562] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 7d78b891-34c0-46dd-8b0d-ce80517232e1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2275.833147] env[63024]: DEBUG nova.compute.manager [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2276.149272] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2276.227897] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e79d7141-00e5-40c9-a88f-244a3ae685d8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2276.352260] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2276.731199] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 2e59f840-26bf-4192-b1ee-3645e9a64d1a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2276.731492] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2276.731577] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2276.797228] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c68fa751-0836-4822-8550-0255a02678e2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.804953] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3eef9b2b-06c2-4fd5-a4b4-0725dbcd6c70 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.833973] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd96a91-8e3b-4161-a7f7-f6d9fba98eaa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.841149] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de120588-832e-4d1f-9e8f-eca761d8e025 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2276.853886] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2277.356514] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2277.861631] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2277.861976] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.662s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2277.862148] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.191s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2277.862380] env[63024]: DEBUG nova.objects.instance [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'pci_requests' on Instance uuid 7d78b891-34c0-46dd-8b0d-ce80517232e1 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2278.367196] env[63024]: DEBUG nova.objects.instance [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'numa_topology' on Instance uuid 7d78b891-34c0-46dd-8b0d-ce80517232e1 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2278.871102] env[63024]: INFO nova.compute.claims [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2279.939048] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f424bb6-684a-45de-a834-bed3a5738dc9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.946151] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9acc748e-026d-4380-bf0b-95ce11b0c864 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.975588] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-315f5439-2041-4569-b367-fbd7642c9a94 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.982667] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71c5d0f-19a3-4272-892a-15340967a2f7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2279.995119] env[63024]: DEBUG nova.compute.provider_tree [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 
tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2280.497939] env[63024]: DEBUG nova.scheduler.client.report [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2281.002963] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.141s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2281.005351] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.856s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2281.008213] env[63024]: INFO nova.compute.claims [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2281.038055] env[63024]: INFO nova.network.neutron [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updating port 4dc6f8a3-c6c8-459b-8a05-db8924adb128 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2282.074388] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9cba944-15ae-4716-aa78-33549adeee7e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.081796] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1c392d2-9a2b-4513-be2c-c2ee3bce047a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.111239] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a90a1d-e98e-4701-b828-0454382e0e2e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.118557] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d56bc2-094a-4202-a2f8-69a05d967cbd {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.131362] env[63024]: DEBUG nova.compute.provider_tree [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2282.434920] env[63024]: DEBUG nova.compute.manager [req-7274abe5-2ffa-4986-9e5d-4a0516557b18 req-43160fed-ac24-4313-a259-8a38dd8f5676 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Received event network-vif-plugged-4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2282.435296] env[63024]: DEBUG oslo_concurrency.lockutils [req-7274abe5-2ffa-4986-9e5d-4a0516557b18 req-43160fed-ac24-4313-a259-8a38dd8f5676 service nova] Acquiring lock "7d78b891-34c0-46dd-8b0d-ce80517232e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2282.435490] env[63024]: DEBUG oslo_concurrency.lockutils [req-7274abe5-2ffa-4986-9e5d-4a0516557b18 req-43160fed-ac24-4313-a259-8a38dd8f5676 service nova] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2282.435662] env[63024]: DEBUG oslo_concurrency.lockutils [req-7274abe5-2ffa-4986-9e5d-4a0516557b18 req-43160fed-ac24-4313-a259-8a38dd8f5676 service nova] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2282.435833] env[63024]: DEBUG nova.compute.manager [req-7274abe5-2ffa-4986-9e5d-4a0516557b18 req-43160fed-ac24-4313-a259-8a38dd8f5676 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] No waiting events found dispatching network-vif-plugged-4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2282.436030] env[63024]: WARNING nova.compute.manager [req-7274abe5-2ffa-4986-9e5d-4a0516557b18 req-43160fed-ac24-4313-a259-8a38dd8f5676 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Received unexpected event network-vif-plugged-4dc6f8a3-c6c8-459b-8a05-db8924adb128 for instance with vm_state shelved_offloaded and task_state spawning. 
[ 2282.521263] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2282.521382] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2282.521564] env[63024]: DEBUG nova.network.neutron [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2282.634395] env[63024]: DEBUG nova.scheduler.client.report [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2283.138896] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.133s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2283.139485] env[63024]: DEBUG nova.compute.manager [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2283.142704] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.790s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.143502] env[63024]: INFO nova.compute.claims [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2283.272994] env[63024]: DEBUG nova.network.neutron [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updating instance_info_cache with network_info: [{"id": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "address": "fa:16:3e:74:e2:14", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dc6f8a3-c6", "ovs_interfaceid": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2283.427409] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "14bafeba-9f5b-4488-b29c-38939973deb9" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2283.427698] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "14bafeba-9f5b-4488-b29c-38939973deb9" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2283.648107] env[63024]: DEBUG nova.compute.utils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 
tempest-ServerShowV247Test-235399518-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2283.650029] env[63024]: DEBUG nova.compute.manager [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Not allocating networking since 'none' was specified. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2283.776099] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2283.807380] env[63024]: DEBUG nova.virt.hardware [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='b0bf176c2212c1980e0b497d59b499ee',container_format='bare',created_at=2024-12-22T11:18:20Z,direct_url=,disk_format='vmdk',id=6367336c-9163-4307-b69a-e0e54fef2edf,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-841830532-shelved',owner='0e53c02ad56640dc8cbc8839669b67bf',properties=ImageMetaProps,protected=,size=31664640,status='active',tags=,updated_at=2024-12-22T11:18:34Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2283.807687] env[63024]: DEBUG nova.virt.hardware [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2283.807889] env[63024]: DEBUG nova.virt.hardware [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2283.808124] env[63024]: DEBUG nova.virt.hardware [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2283.808319] env[63024]: DEBUG nova.virt.hardware [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2283.808504] env[63024]: DEBUG nova.virt.hardware [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2283.808769] env[63024]: DEBUG nova.virt.hardware [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2283.808967] env[63024]: DEBUG nova.virt.hardware [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2283.809189] env[63024]: DEBUG nova.virt.hardware [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2283.809373] env[63024]: DEBUG nova.virt.hardware [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2283.809608] env[63024]: DEBUG nova.virt.hardware [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2283.810538] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da7219f-4cf6-429c-bcb6-dd626cb2f8a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.818803] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd36fbad-ab05-427e-a497-050feba6a50b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.832992] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:74:e2:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '09bf081b-cdf0-4977-abe2-2339a87409ab', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4dc6f8a3-c6c8-459b-8a05-db8924adb128', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2283.840678] env[63024]: DEBUG oslo.service.loopingcall [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2283.840955] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2283.841193] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d5098d9e-a3b9-4634-85b2-bc88bebb3e5c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.871241] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2283.871241] env[63024]: value = "task-1952014" [ 2283.871241] env[63024]: _type = "Task" [ 2283.871241] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.881115] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952014, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.930892] env[63024]: INFO nova.compute.manager [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Detaching volume 06603432-9319-4172-af00-cbb4469c359f [ 2283.966430] env[63024]: INFO nova.virt.block_device [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Attempting to driver detach volume 06603432-9319-4172-af00-cbb4469c359f from mountpoint /dev/sdb [ 2283.966699] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Volume detach. 
Driver type: vmdk {{(pid=63024) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 2283.966906] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402275', 'volume_id': '06603432-9319-4172-af00-cbb4469c359f', 'name': 'volume-06603432-9319-4172-af00-cbb4469c359f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '14bafeba-9f5b-4488-b29c-38939973deb9', 'attached_at': '', 'detached_at': '', 'volume_id': '06603432-9319-4172-af00-cbb4469c359f', 'serial': '06603432-9319-4172-af00-cbb4469c359f'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 2283.967851] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5f55a2-dd8b-4e5f-bc59-1f4fdeb639d2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.996506] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60bc3fab-fd53-4bbf-a686-273ed1d94cb9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.004236] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0ee7b6-dd84-42b1-ade2-47571141be77 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.025060] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95654281-d7a0-4d90-8215-182ec22fa4d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.041882] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] The volume has not been displaced from its original location: [datastore1] volume-06603432-9319-4172-af00-cbb4469c359f/volume-06603432-9319-4172-af00-cbb4469c359f.vmdk. No consolidation needed. 
{{(pid=63024) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 2284.047776] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Reconfiguring VM instance instance-00000073 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 2284.048173] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6d98dbb8-6910-4358-9a5a-1af9cd60ddb7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.068467] env[63024]: DEBUG oslo_vmware.api [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2284.068467] env[63024]: value = "task-1952015" [ 2284.068467] env[63024]: _type = "Task" [ 2284.068467] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.077511] env[63024]: DEBUG oslo_vmware.api [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952015, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.153508] env[63024]: DEBUG nova.compute.manager [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Start building block device mappings for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2284.233386] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0623e14a-d69b-4913-8929-db2313c12472 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.241204] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0222c8bd-1540-401e-8e7f-89c96aabdf31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.272067] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa5efb05-2443-411b-b65a-27ada2d58544 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.280397] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7d301fa-8a12-4992-9b60-42121aff2d66 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.295906] env[63024]: DEBUG nova.compute.provider_tree [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2284.381334] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952014, 'name': CreateVM_Task, 'duration_secs': 0.329018} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.381507] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2284.382189] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2284.382356] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2284.382853] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2284.382995] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-faa8a62c-6303-4475-a025-d7ea52d27421 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.388154] env[63024]: DEBUG 
oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2284.388154] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e159cd-a9bf-5a7e-5b61-fc228b9effac" [ 2284.388154] env[63024]: _type = "Task" [ 2284.388154] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.399699] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52e159cd-a9bf-5a7e-5b61-fc228b9effac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.461086] env[63024]: DEBUG nova.compute.manager [req-b61893ed-733f-4500-90b1-2083a73cee21 req-2d468e4a-1f4c-4a23-9d9d-776e1edb3a9b service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Received event network-changed-4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2284.461418] env[63024]: DEBUG nova.compute.manager [req-b61893ed-733f-4500-90b1-2083a73cee21 req-2d468e4a-1f4c-4a23-9d9d-776e1edb3a9b service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Refreshing instance network info cache due to event network-changed-4dc6f8a3-c6c8-459b-8a05-db8924adb128. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2284.461744] env[63024]: DEBUG oslo_concurrency.lockutils [req-b61893ed-733f-4500-90b1-2083a73cee21 req-2d468e4a-1f4c-4a23-9d9d-776e1edb3a9b service nova] Acquiring lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2284.461988] env[63024]: DEBUG oslo_concurrency.lockutils [req-b61893ed-733f-4500-90b1-2083a73cee21 req-2d468e4a-1f4c-4a23-9d9d-776e1edb3a9b service nova] Acquired lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2284.462282] env[63024]: DEBUG nova.network.neutron [req-b61893ed-733f-4500-90b1-2083a73cee21 req-2d468e4a-1f4c-4a23-9d9d-776e1edb3a9b service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Refreshing network info cache for port 4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2284.578651] env[63024]: DEBUG oslo_vmware.api [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952015, 'name': ReconfigVM_Task, 'duration_secs': 0.233179} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.578884] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Reconfigured VM instance instance-00000073 to detach disk 2001 {{(pid=63024) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 2284.583729] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c93092fb-7ecf-4285-922e-18b6a667381e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.600445] env[63024]: DEBUG oslo_vmware.api [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2284.600445] env[63024]: value = "task-1952016" [ 2284.600445] env[63024]: _type = "Task" [ 2284.600445] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.608744] env[63024]: DEBUG oslo_vmware.api [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952016, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.799226] env[63024]: DEBUG nova.scheduler.client.report [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2284.899583] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2284.899852] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Processing image 6367336c-9163-4307-b69a-e0e54fef2edf {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2284.900111] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf/6367336c-9163-4307-b69a-e0e54fef2edf.vmdk" {{(pid=63024) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2284.900263] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquired lock "[datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf/6367336c-9163-4307-b69a-e0e54fef2edf.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2284.900444] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2284.900690] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2d807cb9-2752-439f-bb87-f22e06192488 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.909275] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2284.909444] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2284.910121] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b3e1edf-d37e-4aee-a668-a7950a852bd8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.914830] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2284.914830] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52354ad0-530e-5979-3212-6acfd777fea4" [ 2284.914830] env[63024]: _type = "Task" [ 2284.914830] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2284.926673] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52354ad0-530e-5979-3212-6acfd777fea4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.110062] env[63024]: DEBUG oslo_vmware.api [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952016, 'name': ReconfigVM_Task, 'duration_secs': 0.193939} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.110384] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-402275', 'volume_id': '06603432-9319-4172-af00-cbb4469c359f', 'name': 'volume-06603432-9319-4172-af00-cbb4469c359f', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': '14bafeba-9f5b-4488-b29c-38939973deb9', 'attached_at': '', 'detached_at': '', 'volume_id': '06603432-9319-4172-af00-cbb4469c359f', 'serial': '06603432-9319-4172-af00-cbb4469c359f'} {{(pid=63024) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 2285.167353] env[63024]: DEBUG nova.compute.manager [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2285.183826] env[63024]: DEBUG nova.network.neutron [req-b61893ed-733f-4500-90b1-2083a73cee21 req-2d468e4a-1f4c-4a23-9d9d-776e1edb3a9b service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updated VIF entry in instance network info cache for port 4dc6f8a3-c6c8-459b-8a05-db8924adb128. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2285.184230] env[63024]: DEBUG nova.network.neutron [req-b61893ed-733f-4500-90b1-2083a73cee21 req-2d468e4a-1f4c-4a23-9d9d-776e1edb3a9b service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updating instance_info_cache with network_info: [{"id": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "address": "fa:16:3e:74:e2:14", "network": {"id": "27687c23-521d-4beb-ad4f-278994149c2c", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-381619610-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e53c02ad56640dc8cbc8839669b67bf", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "09bf081b-cdf0-4977-abe2-2339a87409ab", "external-id": "nsx-vlan-transportzone-378", "segmentation_id": 378, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4dc6f8a3-c6", "ovs_interfaceid": "4dc6f8a3-c6c8-459b-8a05-db8924adb128", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2285.188944] env[63024]: DEBUG nova.virt.hardware [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Getting desirable 
topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2285.189199] env[63024]: DEBUG nova.virt.hardware [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2285.189360] env[63024]: DEBUG nova.virt.hardware [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2285.189543] env[63024]: DEBUG nova.virt.hardware [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2285.189689] env[63024]: DEBUG nova.virt.hardware [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2285.189864] env[63024]: DEBUG nova.virt.hardware [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2285.190092] env[63024]: DEBUG nova.virt.hardware [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2285.190259] env[63024]: DEBUG nova.virt.hardware [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2285.190431] env[63024]: DEBUG nova.virt.hardware [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2285.190595] env[63024]: DEBUG nova.virt.hardware [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe 
tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2285.190768] env[63024]: DEBUG nova.virt.hardware [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2285.191636] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64150fed-f3f3-4476-89f5-2980025e6439 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.201023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c414fbad-0b0f-41ab-969f-3edd35cccba7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.215885] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Instance VIF info [] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2285.221341] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Creating folder: Project (65c7f071787147648bfda64bf9da6a26). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2285.221604] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6df99c6b-b1ac-44eb-8f3b-7dc464b86644 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.236728] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Created folder: Project (65c7f071787147648bfda64bf9da6a26) in parent group-v401959. [ 2285.236962] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Creating folder: Instances. Parent ref: group-v402283. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2285.237211] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-37c472b4-23d2-4fff-bb4d-b20b138b91dd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.248091] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Created folder: Instances in parent group-v402283. [ 2285.248315] env[63024]: DEBUG oslo.service.loopingcall [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2285.248497] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2285.248697] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1cc1fb88-b68e-424b-9f5b-56b6e8f17664 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.263862] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2285.263862] env[63024]: value = "task-1952019" [ 2285.263862] env[63024]: _type = "Task" [ 2285.263862] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.270931] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952019, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.304842] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.163s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2285.305475] env[63024]: DEBUG nova.compute.manager [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Start building networks asynchronously for instance. 
{{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2285.425494] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Preparing fetch location {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2285.425769] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Fetch image to [datastore1] OSTACK_IMG_9aeb8e85-0a04-4948-aa59-b65221d96347/OSTACK_IMG_9aeb8e85-0a04-4948-aa59-b65221d96347.vmdk {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2285.425965] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Downloading stream optimized image 6367336c-9163-4307-b69a-e0e54fef2edf to [datastore1] OSTACK_IMG_9aeb8e85-0a04-4948-aa59-b65221d96347/OSTACK_IMG_9aeb8e85-0a04-4948-aa59-b65221d96347.vmdk on the data store datastore1 as vApp {{(pid=63024) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2285.426243] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Downloading image file data 6367336c-9163-4307-b69a-e0e54fef2edf to the ESX as VM named 'OSTACK_IMG_9aeb8e85-0a04-4948-aa59-b65221d96347' {{(pid=63024) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2285.496046] env[63024]: DEBUG oslo_vmware.rw_handles [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2285.496046] env[63024]: value = "resgroup-9" [ 2285.496046] env[63024]: _type = "ResourcePool" [ 2285.496046] env[63024]: }. {{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2285.496369] env[63024]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-3e451c9e-a02c-4120-a98a-308e0e16debe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.516604] env[63024]: DEBUG oslo_vmware.rw_handles [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lease: (returnval){ [ 2285.516604] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52874b0c-8d40-2d7a-b191-14468dee4c87" [ 2285.516604] env[63024]: _type = "HttpNfcLease" [ 2285.516604] env[63024]: } obtained for vApp import into resource pool (val){ [ 2285.516604] env[63024]: value = "resgroup-9" [ 2285.516604] env[63024]: _type = "ResourcePool" [ 2285.516604] env[63024]: }. 
{{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2285.516938] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the lease: (returnval){ [ 2285.516938] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52874b0c-8d40-2d7a-b191-14468dee4c87" [ 2285.516938] env[63024]: _type = "HttpNfcLease" [ 2285.516938] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2285.522661] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2285.522661] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52874b0c-8d40-2d7a-b191-14468dee4c87" [ 2285.522661] env[63024]: _type = "HttpNfcLease" [ 2285.522661] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2285.652019] env[63024]: DEBUG nova.objects.instance [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lazy-loading 'flavor' on Instance uuid 14bafeba-9f5b-4488-b29c-38939973deb9 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2285.687437] env[63024]: DEBUG oslo_concurrency.lockutils [req-b61893ed-733f-4500-90b1-2083a73cee21 req-2d468e4a-1f4c-4a23-9d9d-776e1edb3a9b service nova] Releasing lock "refresh_cache-7d78b891-34c0-46dd-8b0d-ce80517232e1" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2285.773647] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952019, 'name': CreateVM_Task, 'duration_secs': 0.24875} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2285.773851] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2285.774259] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2285.774421] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2285.774743] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2285.774993] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-133eeeb0-0480-4f66-941a-4d6f111f52a3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2285.779310] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2285.779310] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52087f1b-7a6c-2db3-9e10-54dbf9e194a0" [ 2285.779310] env[63024]: _type = "Task" [ 2285.779310] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2285.786530] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52087f1b-7a6c-2db3-9e10-54dbf9e194a0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2285.810473] env[63024]: DEBUG nova.compute.utils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2285.811928] env[63024]: DEBUG nova.compute.manager [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Not allocating networking since 'none' was specified. 
{{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2286.025640] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2286.025640] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52874b0c-8d40-2d7a-b191-14468dee4c87" [ 2286.025640] env[63024]: _type = "HttpNfcLease" [ 2286.025640] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2286.025932] env[63024]: DEBUG oslo_vmware.rw_handles [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2286.025932] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52874b0c-8d40-2d7a-b191-14468dee4c87" [ 2286.025932] env[63024]: _type = "HttpNfcLease" [ 2286.025932] env[63024]: }. {{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2286.026655] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd474e17-92c4-4d99-9082-b9a67430a900 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.033823] env[63024]: DEBUG oslo_vmware.rw_handles [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fcf73d-8afe-c9d5-bbd1-efe2d95a4ba6/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2286.034074] env[63024]: DEBUG oslo_vmware.rw_handles [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating HTTP connection to write to file with size = 31664640 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fcf73d-8afe-c9d5-bbd1-efe2d95a4ba6/disk-0.vmdk. {{(pid=63024) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2286.096573] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-afb8fd51-f9ac-4c01-8b75-b513d7c4e08a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.291894] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52087f1b-7a6c-2db3-9e10-54dbf9e194a0, 'name': SearchDatastore_Task, 'duration_secs': 0.017855} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.293838] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2286.294213] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2286.294385] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2286.294533] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2286.294711] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2286.294974] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6cc4fb47-c98d-4a24-88f7-71b11a07d9cd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.307843] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2286.308075] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2286.308841] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2230e49-6e2e-4c45-8392-a3513ad91d00 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.312946] env[63024]: DEBUG nova.compute.manager [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2286.316766] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2286.316766] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52863f9f-b1dd-06d1-e4c5-dcdbbed0a2bf" [ 2286.316766] env[63024]: _type = "Task" [ 2286.316766] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.327567] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52863f9f-b1dd-06d1-e4c5-dcdbbed0a2bf, 'name': SearchDatastore_Task, 'duration_secs': 0.008125} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.328340] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3194e6c-429d-4e61-be77-6fa890a639f1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.334633] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2286.334633] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52923809-f71a-698b-928e-7cc24b53b544" [ 2286.334633] env[63024]: _type = "Task" [ 2286.334633] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.343827] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52923809-f71a-698b-928e-7cc24b53b544, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2286.661528] env[63024]: DEBUG oslo_concurrency.lockutils [None req-3a45c5a5-62e6-486d-ba3d-bd87cba74dfc tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "14bafeba-9f5b-4488-b29c-38939973deb9" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.234s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2286.847779] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52923809-f71a-698b-928e-7cc24b53b544, 'name': SearchDatastore_Task, 'duration_secs': 0.009183} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2286.848051] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2286.848318] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e79d7141-00e5-40c9-a88f-244a3ae685d8/e79d7141-00e5-40c9-a88f-244a3ae685d8.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2286.848581] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-381a4419-8d20-403e-a3b7-2e082ed0bab8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2286.854796] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2286.854796] env[63024]: value = "task-1952021" [ 2286.854796] env[63024]: _type = "Task" [ 2286.854796] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2286.863861] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952021, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.100500] env[63024]: DEBUG oslo_vmware.rw_handles [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Completed reading data from the image iterator. 
{{(pid=63024) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2287.100861] env[63024]: DEBUG oslo_vmware.rw_handles [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fcf73d-8afe-c9d5-bbd1-efe2d95a4ba6/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2287.102048] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44514651-ab38-449c-8315-ef4ff6e957a4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.111189] env[63024]: DEBUG oslo_vmware.rw_handles [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fcf73d-8afe-c9d5-bbd1-efe2d95a4ba6/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2287.111455] env[63024]: DEBUG oslo_vmware.rw_handles [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fcf73d-8afe-c9d5-bbd1-efe2d95a4ba6/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2287.111766] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-667da921-01d8-46d0-84eb-a66d8e430e54 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.326148] env[63024]: DEBUG nova.compute.manager [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Start spawning the instance on the hypervisor. {{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2287.330092] env[63024]: DEBUG oslo_vmware.rw_handles [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Closed VMDK write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fcf73d-8afe-c9d5-bbd1-efe2d95a4ba6/disk-0.vmdk. 
{{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2287.330418] env[63024]: INFO nova.virt.vmwareapi.images [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Downloaded image file data 6367336c-9163-4307-b69a-e0e54fef2edf [ 2287.331121] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74e02f8-d468-4a0a-8e7c-7a1cce5458a7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.349575] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13a02996-ed84-433d-8190-7577c2cc701f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.357416] env[63024]: DEBUG nova.virt.hardware [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2287.357668] env[63024]: DEBUG nova.virt.hardware [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2287.357825] env[63024]: DEBUG nova.virt.hardware [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2287.358017] env[63024]: DEBUG nova.virt.hardware [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2287.358167] env[63024]: DEBUG nova.virt.hardware [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2287.358353] env[63024]: DEBUG nova.virt.hardware [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 2287.358526] env[63024]: DEBUG nova.virt.hardware [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2287.358681] env[63024]: DEBUG nova.virt.hardware [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2287.358848] env[63024]: DEBUG nova.virt.hardware [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2287.359022] env[63024]: DEBUG nova.virt.hardware [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2287.359205] env[63024]: DEBUG nova.virt.hardware [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2287.360064] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d99276a-02d6-43b4-8ad1-16b4c93a9b05 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.373057] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfd8f46d-42fb-48e3-adc8-0bc8a4131c2a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.376950] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952021, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.452068} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.377661] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] e79d7141-00e5-40c9-a88f-244a3ae685d8/e79d7141-00e5-40c9-a88f-244a3ae685d8.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2287.377875] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2287.378917] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19bf25ff-8ace-4c9b-84ac-fedc3817ae47 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.380756] env[63024]: INFO nova.virt.vmwareapi.images [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] The imported VM was unregistered [ 2287.383063] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Caching image {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2287.383288] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Creating directory with path [datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2287.391847] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-35b34ef8-9dec-4a45-b218-4b1acaf5cda7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.393589] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Instance VIF info [] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2287.398930] env[63024]: DEBUG oslo.service.loopingcall [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2287.399480] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2287.399981] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dd336708-acd3-4382-a9ab-21632f96a686 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.412864] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Created directory with path [datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2287.413056] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_9aeb8e85-0a04-4948-aa59-b65221d96347/OSTACK_IMG_9aeb8e85-0a04-4948-aa59-b65221d96347.vmdk to [datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf/6367336c-9163-4307-b69a-e0e54fef2edf.vmdk. {{(pid=63024) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2287.413353] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2287.413353] env[63024]: value = "task-1952023" [ 2287.413353] env[63024]: _type = "Task" [ 2287.413353] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.413820] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-33b2a16a-b7c5-4be2-8553-e7c9c8167752 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.419514] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2287.419514] env[63024]: value = "task-1952024" [ 2287.419514] env[63024]: _type = "Task" [ 2287.419514] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.425991] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952023, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.426290] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2287.426290] env[63024]: value = "task-1952025" [ 2287.426290] env[63024]: _type = "Task" [ 2287.426290] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.428992] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952024, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.435972] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952025, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.699923] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "14bafeba-9f5b-4488-b29c-38939973deb9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2287.700247] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "14bafeba-9f5b-4488-b29c-38939973deb9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2287.700489] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "14bafeba-9f5b-4488-b29c-38939973deb9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2287.700668] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "14bafeba-9f5b-4488-b29c-38939973deb9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2287.700839] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "14bafeba-9f5b-4488-b29c-38939973deb9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2287.703220] env[63024]: INFO nova.compute.manager [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Terminating instance [ 2287.931083] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952023, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.055077} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2287.936769] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2287.937097] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952024, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.937823] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86e274e2-de6d-4179-9247-bdbe4ab8c930 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.947673] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952025, 'name': MoveVirtualDisk_Task} progress is 26%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2287.962992] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] e79d7141-00e5-40c9-a88f-244a3ae685d8/e79d7141-00e5-40c9-a88f-244a3ae685d8.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2287.963428] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8223279d-b9a4-4690-bf8a-8bf82a191cda {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2287.986218] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2287.986218] env[63024]: value = "task-1952026" [ 2287.986218] env[63024]: _type = "Task" [ 2287.986218] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2287.998475] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952026, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.207912] env[63024]: DEBUG nova.compute.manager [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2288.208207] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2288.209165] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd5313aa-acc9-417b-acce-25836122065c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.218839] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2288.219143] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-797ee812-a463-4e7e-b6ee-cace6f073fc3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2288.229136] env[63024]: DEBUG oslo_vmware.api [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2288.229136] env[63024]: value = "task-1952027" [ 2288.229136] env[63024]: _type = "Task" [ 2288.229136] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2288.240205] env[63024]: DEBUG oslo_vmware.api [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952027, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.433572] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952024, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.443801] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952025, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.499123] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952026, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.740994] env[63024]: DEBUG oslo_vmware.api [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952027, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.935635] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952024, 'name': CreateVM_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.945390] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952025, 'name': MoveVirtualDisk_Task} progress is 69%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2288.999667] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952026, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.242848] env[63024]: DEBUG oslo_vmware.api [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952027, 'name': PowerOffVM_Task, 'duration_secs': 0.536136} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.243189] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2289.243368] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2289.243633] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-01b67746-a452-4af6-b3e5-88be55494466 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.435046] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952024, 'name': CreateVM_Task, 'duration_secs': 1.580459} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.435427] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2289.435784] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2289.435952] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2289.436342] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2289.439717] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8ac5839-4fa0-41dd-840e-5e3f1a6b14ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.444823] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2289.444823] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c406fa-3da8-3058-4259-df7ed5b60fbe" [ 2289.444823] env[63024]: _type = "Task" [ 2289.444823] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.450946] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952025, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.455910] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c406fa-3da8-3058-4259-df7ed5b60fbe, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.497124] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952026, 'name': ReconfigVM_Task, 'duration_secs': 1.490574} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.497434] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Reconfigured VM instance instance-0000007b to attach disk [datastore1] e79d7141-00e5-40c9-a88f-244a3ae685d8/e79d7141-00e5-40c9-a88f-244a3ae685d8.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2289.498089] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-242ae59e-aa0f-4b8b-875b-b11657d32404 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.505244] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2289.505244] env[63024]: value = "task-1952029" [ 2289.505244] env[63024]: _type = "Task" [ 2289.505244] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.517643] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952029, 'name': Rename_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.945048] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952025, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.242064} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.945366] env[63024]: INFO nova.virt.vmwareapi.ds_util [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_9aeb8e85-0a04-4948-aa59-b65221d96347/OSTACK_IMG_9aeb8e85-0a04-4948-aa59-b65221d96347.vmdk to [datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf/6367336c-9163-4307-b69a-e0e54fef2edf.vmdk. 
[ 2289.945554] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Cleaning up location [datastore1] OSTACK_IMG_9aeb8e85-0a04-4948-aa59-b65221d96347 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2289.945717] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_9aeb8e85-0a04-4948-aa59-b65221d96347 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2289.948751] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-86557f6e-63d1-48a1-8fec-c452073821dd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.955035] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c406fa-3da8-3058-4259-df7ed5b60fbe, 'name': SearchDatastore_Task, 'duration_secs': 0.087205} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2289.956148] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2289.956386] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2289.956624] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2289.956780] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2289.956961] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2289.957273] 
env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2289.957273] env[63024]: value = "task-1952030" [ 2289.957273] env[63024]: _type = "Task" [ 2289.957273] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.957449] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b374b00-57a5-43b8-98ab-4cbfdd8f2a49 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.966120] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952030, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2289.967035] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2289.967211] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2289.967842] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2c4a4fb-cde8-47ea-9b10-ee96a878a254 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2289.972061] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2289.972061] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c41e59-2ca8-be74-d763-0b925fe98749" [ 2289.972061] env[63024]: _type = "Task" [ 2289.972061] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2289.978770] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c41e59-2ca8-be74-d763-0b925fe98749, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.013331] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952029, 'name': Rename_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.468833] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952030, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.033735} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.469219] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2290.469257] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Releasing lock "[datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf/6367336c-9163-4307-b69a-e0e54fef2edf.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2290.469489] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf/6367336c-9163-4307-b69a-e0e54fef2edf.vmdk to [datastore1] 7d78b891-34c0-46dd-8b0d-ce80517232e1/7d78b891-34c0-46dd-8b0d-ce80517232e1.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2290.469740] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a94696a-54e3-4a50-a764-900c277ff507 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.476534] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2290.476534] env[63024]: value = "task-1952031" [ 2290.476534] env[63024]: _type = "Task" [ 2290.476534] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.483723] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52c41e59-2ca8-be74-d763-0b925fe98749, 'name': SearchDatastore_Task, 'duration_secs': 0.0078} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2290.484678] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0090f4c-3c31-4426-92f1-2de39d591cf4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2290.489181] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952031, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.492270] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2290.492270] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b8f0c6-febe-b617-dd95-90ed87c633fb" [ 2290.492270] env[63024]: _type = "Task" [ 2290.492270] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2290.499277] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b8f0c6-febe-b617-dd95-90ed87c633fb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.513530] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952029, 'name': Rename_Task} progress is 99%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2290.986432] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952031, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.001199] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52b8f0c6-febe-b617-dd95-90ed87c633fb, 'name': SearchDatastore_Task, 'duration_secs': 0.008262} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2291.001516] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2291.001746] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 2e59f840-26bf-4192-b1ee-3645e9a64d1a/2e59f840-26bf-4192-b1ee-3645e9a64d1a.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2291.002028] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e5d6e5d-d23f-46e9-a299-eefaa35e3b7b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.011827] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2291.011827] env[63024]: value = "task-1952032" [ 2291.011827] env[63024]: _type = "Task" [ 2291.011827] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2291.018625] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952029, 'name': Rename_Task, 'duration_secs': 1.025256} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2291.021648] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2291.021923] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952032, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.022152] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-751e4a91-f7d1-49be-a1e2-476e712f165a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2291.028800] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2291.028800] env[63024]: value = "task-1952033" [ 2291.028800] env[63024]: _type = "Task" [ 2291.028800] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2291.036929] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952033, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.487821] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952031, 'name': CopyVirtualDisk_Task} progress is 46%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.522118] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952032, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.537913] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952033, 'name': PowerOnVM_Task} progress is 87%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2291.989017] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952031, 'name': CopyVirtualDisk_Task} progress is 69%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.022208] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952032, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.039191] env[63024]: DEBUG oslo_vmware.api [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952033, 'name': PowerOnVM_Task, 'duration_secs': 1.007172} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2292.039454] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2292.039796] env[63024]: INFO nova.compute.manager [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Took 6.87 seconds to spawn the instance on the hypervisor. 
[ 2292.039796] env[63024]: DEBUG nova.compute.manager [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2292.040625] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c360b8-8462-42b1-bab5-103b28e39849 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.454586] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2292.454872] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2292.455126] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleting the datastore file [datastore1] 14bafeba-9f5b-4488-b29c-38939973deb9 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2292.455424] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bdd61a55-375f-498f-8cfe-40cb0004bcba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2292.463886] env[63024]: DEBUG oslo_vmware.api [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2292.463886] env[63024]: value = "task-1952034" [ 2292.463886] env[63024]: _type = "Task" [ 2292.463886] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2292.474684] env[63024]: DEBUG oslo_vmware.api [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952034, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.491235] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952031, 'name': CopyVirtualDisk_Task} progress is 91%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.524832] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952032, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.556968] env[63024]: INFO nova.compute.manager [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Took 16.42 seconds to build instance. [ 2292.974494] env[63024]: DEBUG oslo_vmware.api [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952034, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2292.989756] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952031, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.15707} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2292.990023] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/6367336c-9163-4307-b69a-e0e54fef2edf/6367336c-9163-4307-b69a-e0e54fef2edf.vmdk to [datastore1] 7d78b891-34c0-46dd-8b0d-ce80517232e1/7d78b891-34c0-46dd-8b0d-ce80517232e1.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2292.990777] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4bdea0-08d8-42d8-9a83-3f6a9f5b6801 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.013070] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Reconfiguring VM instance instance-00000078 to attach disk [datastore1] 7d78b891-34c0-46dd-8b0d-ce80517232e1/7d78b891-34c0-46dd-8b0d-ce80517232e1.vmdk or device None with type streamOptimized {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2293.013347] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb393f25-5daf-4cd7-a3e5-b1b736c465e5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.034928] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952032, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.904573} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2293.036049] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 2e59f840-26bf-4192-b1ee-3645e9a64d1a/2e59f840-26bf-4192-b1ee-3645e9a64d1a.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2293.036284] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2293.036589] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2293.036589] env[63024]: value = "task-1952035" [ 2293.036589] env[63024]: _type = "Task" [ 2293.036589] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2293.036768] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7d85b91e-fd0f-4359-992a-f598a2e4bc31 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.046232] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952035, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.047410] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2293.047410] env[63024]: value = "task-1952036" [ 2293.047410] env[63024]: _type = "Task" [ 2293.047410] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2293.055277] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952036, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.058827] env[63024]: DEBUG oslo_concurrency.lockutils [None req-c9abd523-01d2-4a1e-b0f8-d5ba0df418fe tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "e79d7141-00e5-40c9-a88f-244a3ae685d8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.930s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2293.475182] env[63024]: DEBUG oslo_vmware.api [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952034, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.519925} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2293.475445] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2293.475632] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2293.475808] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2293.475979] env[63024]: INFO nova.compute.manager [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Took 5.27 seconds to destroy the instance on the hypervisor. [ 2293.476269] env[63024]: DEBUG oslo.service.loopingcall [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2293.476471] env[63024]: DEBUG nova.compute.manager [-] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2293.476565] env[63024]: DEBUG nova.network.neutron [-] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2293.547480] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952035, 'name': ReconfigVM_Task, 'duration_secs': 0.263396} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2293.547845] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Reconfigured VM instance instance-00000078 to attach disk [datastore1] 7d78b891-34c0-46dd-8b0d-ce80517232e1/7d78b891-34c0-46dd-8b0d-ce80517232e1.vmdk or device None with type streamOptimized {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2293.551038] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5db2af9f-d423-431f-9efc-4999da95d959 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.557012] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952036, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065501} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2293.558086] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2293.558415] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2293.558415] env[63024]: value = "task-1952037" [ 2293.558415] env[63024]: _type = "Task" [ 2293.558415] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2293.559051] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4a7f96d-3694-4f5a-924a-44387ca71cb1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.580209] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 2e59f840-26bf-4192-b1ee-3645e9a64d1a/2e59f840-26bf-4192-b1ee-3645e9a64d1a.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2293.583407] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ae491633-36ce-4823-9a7f-6298eea04629 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2293.598431] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952037, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.604756] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2293.604756] env[63024]: value = "task-1952038" [ 2293.604756] env[63024]: _type = "Task" [ 2293.604756] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2293.614178] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952038, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2293.891117] env[63024]: DEBUG nova.compute.manager [req-043b05e2-bda0-4b80-ba4c-2b74fa20dab5 req-bd4b835a-6d90-4a9d-8ebf-6d211e20578a service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Received event network-vif-deleted-4ba24717-2947-46f0-9df8-733d8b40c345 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2293.891300] env[63024]: INFO nova.compute.manager [req-043b05e2-bda0-4b80-ba4c-2b74fa20dab5 req-bd4b835a-6d90-4a9d-8ebf-6d211e20578a service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Neutron deleted interface 4ba24717-2947-46f0-9df8-733d8b40c345; detaching it from the instance and deleting it from the info cache [ 2293.891477] env[63024]: DEBUG nova.network.neutron [req-043b05e2-bda0-4b80-ba4c-2b74fa20dab5 req-bd4b835a-6d90-4a9d-8ebf-6d211e20578a service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2294.071365] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952037, 'name': Rename_Task, 'duration_secs': 0.161497} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2294.071851] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2294.072285] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bdf52ef0-dbf4-472d-a601-48049c38a23c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.080048] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2294.080048] env[63024]: value = "task-1952039" [ 2294.080048] env[63024]: _type = "Task" [ 2294.080048] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2294.088830] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952039, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2294.116081] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952038, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2294.366054] env[63024]: DEBUG nova.network.neutron [-] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2294.394310] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b4038910-95e6-4cab-ae7e-4b099715a63a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.403500] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a7a275-f063-4a76-a5bc-f52466b47914 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.430339] env[63024]: DEBUG nova.compute.manager [req-043b05e2-bda0-4b80-ba4c-2b74fa20dab5 req-bd4b835a-6d90-4a9d-8ebf-6d211e20578a service nova] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Detach interface failed, port_id=4ba24717-2947-46f0-9df8-733d8b40c345, reason: Instance 14bafeba-9f5b-4488-b29c-38939973deb9 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2294.590255] env[63024]: DEBUG oslo_vmware.api [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952039, 'name': PowerOnVM_Task, 'duration_secs': 0.460779} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2294.590620] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2294.618340] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952038, 'name': ReconfigVM_Task, 'duration_secs': 0.591048} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2294.618715] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 2e59f840-26bf-4192-b1ee-3645e9a64d1a/2e59f840-26bf-4192-b1ee-3645e9a64d1a.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2294.619451] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-70998dd0-1a07-4704-b1fc-8b38d2992115 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.627036] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2294.627036] env[63024]: value = "task-1952040" [ 2294.627036] env[63024]: _type = "Task" [ 2294.627036] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2294.636435] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952040, 'name': Rename_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2294.695213] env[63024]: DEBUG nova.compute.manager [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2294.696213] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-031ab8a7-167b-480c-b369-b1b2c9a5c771 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2294.869097] env[63024]: INFO nova.compute.manager [-] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Took 1.39 seconds to deallocate network for instance. [ 2295.136838] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952040, 'name': Rename_Task, 'duration_secs': 0.14992} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2295.137116] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2295.137391] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0fc2aed2-9954-4662-93b0-56e4a05a65aa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.144545] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2295.144545] env[63024]: value = "task-1952041" [ 2295.144545] env[63024]: _type = "Task" [ 2295.144545] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2295.151730] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952041, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2295.213678] env[63024]: DEBUG oslo_concurrency.lockutils [None req-af5ca74e-0ce1-47af-a546-335196c882c7 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.567s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2295.375960] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2295.376347] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2295.376589] env[63024]: DEBUG nova.objects.instance [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lazy-loading 'resources' on Instance uuid 14bafeba-9f5b-4488-b29c-38939973deb9 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2295.654432] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952041, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2295.945497] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-991c1125-88b7-4abc-b4b5-871a21753f05 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.953110] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cc7e08-a34f-46f4-8ffa-1b17ea530284 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.983520] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ba0b7df-5287-4179-8077-7b9b778dc7e2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2295.990860] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0273a5ed-c3e5-4801-bc0f-894ccadad7c1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.004298] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "7d78b891-34c0-46dd-8b0d-ce80517232e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.004547] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.004753] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "7d78b891-34c0-46dd-8b0d-ce80517232e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2296.004935] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2296.005123] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2296.006899] env[63024]: DEBUG 
nova.compute.provider_tree [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2296.008613] env[63024]: INFO nova.compute.manager [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Terminating instance [ 2296.155687] env[63024]: DEBUG oslo_vmware.api [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952041, 'name': PowerOnVM_Task, 'duration_secs': 0.978736} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2296.155922] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2296.156178] env[63024]: INFO nova.compute.manager [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Took 8.83 seconds to spawn the instance on the hypervisor. [ 2296.156356] env[63024]: DEBUG nova.compute.manager [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2296.157083] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67de19df-eedd-4e0e-952d-ecbd2e314f15 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.510785] env[63024]: DEBUG nova.scheduler.client.report [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2296.514331] env[63024]: DEBUG nova.compute.manager [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2296.514545] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2296.515551] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f739e4ed-2ea9-4937-8d90-e090fcb92d51 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.523564] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2296.523791] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f9557f63-e979-499e-b9ed-89e9d4912043 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2296.530780] env[63024]: DEBUG oslo_vmware.api [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2296.530780] env[63024]: value = "task-1952042" [ 2296.530780] env[63024]: _type = "Task" [ 2296.530780] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2296.538604] env[63024]: DEBUG oslo_vmware.api [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952042, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2296.675499] env[63024]: INFO nova.compute.manager [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Took 20.34 seconds to build instance. [ 2296.996403] env[63024]: INFO nova.compute.manager [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Rebuilding instance [ 2297.017711] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.641s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2297.040058] env[63024]: DEBUG oslo_vmware.api [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952042, 'name': PowerOffVM_Task, 'duration_secs': 0.158394} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2297.040914] env[63024]: INFO nova.scheduler.client.report [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleted allocations for instance 14bafeba-9f5b-4488-b29c-38939973deb9 [ 2297.043866] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2297.044077] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2297.046314] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df0b178f-9637-4ade-afc8-a79e48b112da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.050785] env[63024]: DEBUG nova.compute.manager [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2297.052479] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe45cc18-e6d2-4be0-b9b4-f78d919caa6f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.160166] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2297.160404] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2297.160578] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleting the datastore file [datastore1] 7d78b891-34c0-46dd-8b0d-ce80517232e1 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2297.160836] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88fd9bba-c397-483f-a33b-22da0afdd03b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2297.167758] env[63024]: DEBUG oslo_vmware.api [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 
tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for the task: (returnval){ [ 2297.167758] env[63024]: value = "task-1952044" [ 2297.167758] env[63024]: _type = "Task" [ 2297.167758] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2297.175429] env[63024]: DEBUG oslo_vmware.api [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952044, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2297.177909] env[63024]: DEBUG oslo_concurrency.lockutils [None req-54073164-f34f-4bfa-a85f-99b6b97c6cb8 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "2e59f840-26bf-4192-b1ee-3645e9a64d1a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.847s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2297.554185] env[63024]: DEBUG oslo_concurrency.lockutils [None req-6789f37d-f603-4aef-b86e-c89a8a1e99be tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "14bafeba-9f5b-4488-b29c-38939973deb9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.854s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2297.677795] env[63024]: DEBUG oslo_vmware.api [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Task: {'id': task-1952044, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.138041} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2297.678120] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2297.678321] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2297.678529] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2297.678704] env[63024]: INFO nova.compute.manager [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 2297.678973] env[63024]: DEBUG oslo.service.loopingcall [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2297.679196] env[63024]: DEBUG nova.compute.manager [-] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2297.679293] env[63024]: DEBUG nova.network.neutron [-] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2298.066981] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2298.066981] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c75b23e8-4175-4007-811c-88cecb675535 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.075458] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2298.075458] env[63024]: value = "task-1952045" [ 2298.075458] env[63024]: _type = "Task" [ 2298.075458] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2298.083518] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952045, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2298.166513] env[63024]: DEBUG nova.compute.manager [req-75db8430-0b98-404e-a6de-b563e7694d58 req-b8da4f7d-6fce-4584-8796-22e273f18278 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Received event network-vif-deleted-4dc6f8a3-c6c8-459b-8a05-db8924adb128 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2298.166513] env[63024]: INFO nova.compute.manager [req-75db8430-0b98-404e-a6de-b563e7694d58 req-b8da4f7d-6fce-4584-8796-22e273f18278 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Neutron deleted interface 4dc6f8a3-c6c8-459b-8a05-db8924adb128; detaching it from the instance and deleting it from the info cache [ 2298.166513] env[63024]: DEBUG nova.network.neutron [req-75db8430-0b98-404e-a6de-b563e7694d58 req-b8da4f7d-6fce-4584-8796-22e273f18278 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2298.583943] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952045, 'name': PowerOffVM_Task, 'duration_secs': 0.115073} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2298.584442] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2298.584791] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2298.586466] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8240ea67-2821-4136-8842-e11f3698aa1f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.593878] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2298.594148] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e86af353-247f-4433-9b22-4274e1da4e5d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.619485] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2298.619785] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc 
tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2298.619992] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Deleting the datastore file [datastore1] 2e59f840-26bf-4192-b1ee-3645e9a64d1a {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2298.620316] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a04c324-ca39-4871-9b16-4f9fe2ddb1b1 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.628208] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2298.628208] env[63024]: value = "task-1952047" [ 2298.628208] env[63024]: _type = "Task" [ 2298.628208] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2298.636832] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952047, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2298.642243] env[63024]: DEBUG nova.network.neutron [-] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2298.669975] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-44915a57-acba-4357-a6f0-47c36e032801 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.681314] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3bb1a8e-f52c-4d0f-a391-e56a58957631 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2298.709509] env[63024]: DEBUG nova.compute.manager [req-75db8430-0b98-404e-a6de-b563e7694d58 req-b8da4f7d-6fce-4584-8796-22e273f18278 service nova] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Detach interface failed, port_id=4dc6f8a3-c6c8-459b-8a05-db8924adb128, reason: Instance 7d78b891-34c0-46dd-8b0d-ce80517232e1 could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2299.140096] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952047, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091892} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2299.140096] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2299.140096] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2299.140096] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2299.145730] env[63024]: INFO nova.compute.manager [-] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Took 1.47 seconds to deallocate network for instance. [ 2299.259419] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2299.259663] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2299.651303] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2299.651457] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2299.651679] env[63024]: DEBUG nova.objects.instance [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lazy-loading 'resources' on Instance uuid 7d78b891-34c0-46dd-8b0d-ce80517232e1 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2299.764017] env[63024]: DEBUG nova.compute.manager [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 
tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Starting instance... {{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2300.174547] env[63024]: DEBUG nova.virt.hardware [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2300.174798] env[63024]: DEBUG nova.virt.hardware [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2300.174955] env[63024]: DEBUG nova.virt.hardware [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2300.175152] env[63024]: DEBUG nova.virt.hardware [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2300.175304] env[63024]: DEBUG nova.virt.hardware [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2300.175457] env[63024]: DEBUG nova.virt.hardware [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2300.175663] env[63024]: DEBUG nova.virt.hardware [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2300.175823] env[63024]: DEBUG nova.virt.hardware [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2300.175991] env[63024]: DEBUG nova.virt.hardware [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2300.176171] env[63024]: DEBUG nova.virt.hardware [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2300.176377] env[63024]: DEBUG nova.virt.hardware [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2300.177827] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48f47e63-e1b5-48b5-9fa4-f7c318ad24af {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.185778] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8ac7f7-d9d5-4e16-8d64-d68f0ca39037 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.201620] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Instance VIF info [] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2300.207141] env[63024]: DEBUG oslo.service.loopingcall [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2300.209452] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2300.210040] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed8414e5-c4b5-48dc-ad4a-d0d76f9f674a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.229519] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2300.229519] env[63024]: value = "task-1952048" [ 2300.229519] env[63024]: _type = "Task" [ 2300.229519] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2300.233654] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102800cc-fee8-434b-ac83-5d70876cd296 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.240501] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952048, 'name': CreateVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2300.242888] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e374e66-417f-4ac4-957a-17c4965fb6d3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.277221] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-110c6faa-5ab9-47ec-9638-5741b18b99c6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.286482] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e42ca1-a68c-464d-9263-23e69b1993ac {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.292500] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2300.301229] env[63024]: DEBUG nova.compute.provider_tree [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2300.739630] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952048, 'name': CreateVM_Task, 'duration_secs': 0.252736} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2300.739810] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2300.740213] env[63024]: DEBUG oslo_concurrency.lockutils [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2300.740378] env[63024]: DEBUG oslo_concurrency.lockutils [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2300.740710] env[63024]: DEBUG oslo_concurrency.lockutils [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2300.740959] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f64eb53-2062-4f67-976f-0b7e91a1c620 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2300.745052] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2300.745052] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523f123e-c737-8046-a06a-cc9e21da88e9" [ 2300.745052] env[63024]: _type = "Task" [ 2300.745052] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2300.752158] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523f123e-c737-8046-a06a-cc9e21da88e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2300.804359] env[63024]: DEBUG nova.scheduler.client.report [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2301.255514] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523f123e-c737-8046-a06a-cc9e21da88e9, 'name': SearchDatastore_Task, 'duration_secs': 0.009303} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2301.255757] env[63024]: DEBUG oslo_concurrency.lockutils [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2301.255992] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2301.256291] env[63024]: DEBUG oslo_concurrency.lockutils [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2301.256482] env[63024]: DEBUG oslo_concurrency.lockutils [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2301.256678] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2301.256929] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-36022963-30a9-4f0a-9582-225b820480db {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.264490] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2301.264678] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2301.265347] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b345f5d5-93bf-40b3-a697-9369c6f25cc4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.270135] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2301.270135] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d2b28e-602d-dd30-fea3-e1a39abd6d0c" [ 2301.270135] env[63024]: _type = "Task" [ 2301.270135] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2301.276873] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d2b28e-602d-dd30-fea3-e1a39abd6d0c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2301.309765] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.658s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2301.311883] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.019s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2301.313440] env[63024]: INFO nova.compute.claims [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2301.328099] env[63024]: INFO nova.scheduler.client.report [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Deleted allocations for instance 7d78b891-34c0-46dd-8b0d-ce80517232e1 [ 2301.781209] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d2b28e-602d-dd30-fea3-e1a39abd6d0c, 'name': SearchDatastore_Task, 'duration_secs': 0.007963} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2301.781974] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48887801-d1a8-4eb6-9d2c-6317ad4b7d50 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2301.786996] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2301.786996] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5271e21e-2eed-011a-ce7c-14562cac05e8" [ 2301.786996] env[63024]: _type = "Task" [ 2301.786996] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2301.795375] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5271e21e-2eed-011a-ce7c-14562cac05e8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2301.836116] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e5cfac41-2678-40b6-83a0-963c931be305 tempest-ServerActionsTestOtherB-772439459 tempest-ServerActionsTestOtherB-772439459-project-member] Lock "7d78b891-34c0-46dd-8b0d-ce80517232e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.831s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2302.297639] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5271e21e-2eed-011a-ce7c-14562cac05e8, 'name': SearchDatastore_Task, 'duration_secs': 0.00897} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2302.297860] env[63024]: DEBUG oslo_concurrency.lockutils [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2302.298140] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 2e59f840-26bf-4192-b1ee-3645e9a64d1a/2e59f840-26bf-4192-b1ee-3645e9a64d1a.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2302.298395] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-209ca686-3050-4f2f-a468-9f6d94a8179a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.304912] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2302.304912] env[63024]: value = "task-1952050" [ 2302.304912] env[63024]: _type = "Task" [ 2302.304912] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2302.311776] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952050, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2302.376666] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f44987d-2324-46a3-906b-24f53a6e7531 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.383985] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-882d56c5-e9dd-4d4c-ad1c-35238905fdb6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.413910] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94c0d07-4543-41d3-8dd5-8d5fcf88b041 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.421506] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00543867-1453-456c-b6b9-a3ae171ab291 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.434380] env[63024]: DEBUG nova.compute.provider_tree [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2302.814995] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952050, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.435078} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2302.815294] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 2e59f840-26bf-4192-b1ee-3645e9a64d1a/2e59f840-26bf-4192-b1ee-3645e9a64d1a.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2302.815529] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2302.815775] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-523c4b46-5482-482f-a3b6-cae51be2da42 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2302.822498] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2302.822498] env[63024]: value = "task-1952051" [ 2302.822498] env[63024]: _type = "Task" [ 2302.822498] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2302.832013] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952051, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2302.938024] env[63024]: DEBUG nova.scheduler.client.report [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2303.332107] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952051, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05853} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2303.332392] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2303.333145] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696b2f9e-75a7-409b-bb91-d20caf5df165 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.351884] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Reconfiguring VM instance instance-0000007c to attach disk [datastore1] 2e59f840-26bf-4192-b1ee-3645e9a64d1a/2e59f840-26bf-4192-b1ee-3645e9a64d1a.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2303.352136] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e26040d-507e-4344-b373-f104608c6aa3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.371611] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2303.371611] env[63024]: value = "task-1952052" [ 2303.371611] env[63024]: _type = "Task" [ 2303.371611] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.378990] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952052, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.443089] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2303.443678] env[63024]: DEBUG nova.compute.manager [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2303.881241] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952052, 'name': ReconfigVM_Task, 'duration_secs': 0.2377} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2303.881532] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Reconfigured VM instance instance-0000007c to attach disk [datastore1] 2e59f840-26bf-4192-b1ee-3645e9a64d1a/2e59f840-26bf-4192-b1ee-3645e9a64d1a.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2303.882148] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8494d588-fd70-4c14-b527-7d323cee84da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2303.888331] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2303.888331] env[63024]: value = "task-1952053" [ 2303.888331] env[63024]: _type = "Task" [ 2303.888331] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2303.897865] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952053, 'name': Rename_Task} progress is 5%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2303.948541] env[63024]: DEBUG nova.compute.utils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2303.950043] env[63024]: DEBUG nova.compute.manager [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Allocating IP information in the background. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 2303.950204] env[63024]: DEBUG nova.network.neutron [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] allocate_for_instance() {{(pid=63024) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 2304.026343] env[63024]: DEBUG nova.policy [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d70f21af28e4c14a8f7b55090aa435f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dd0c44555e30414c83750b762e243dc1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=63024) authorize /opt/stack/nova/nova/policy.py:192}} [ 2304.285677] env[63024]: DEBUG nova.network.neutron [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Successfully created port: d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2304.398296] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952053, 'name': Rename_Task, 'duration_secs': 0.135339} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2304.398621] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2304.398766] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f3d51174-9389-42ed-981e-2c31a6e7b02b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2304.404657] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2304.404657] env[63024]: value = "task-1952054" [ 2304.404657] env[63024]: _type = "Task" [ 2304.404657] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2304.411643] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952054, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2304.453394] env[63024]: DEBUG nova.compute.manager [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2304.914086] env[63024]: DEBUG oslo_vmware.api [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952054, 'name': PowerOnVM_Task, 'duration_secs': 0.435656} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2304.915049] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2304.915049] env[63024]: DEBUG nova.compute.manager [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2304.915378] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d66b9a6-805e-42a6-8f45-d297810ccc03 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2305.432851] env[63024]: DEBUG oslo_concurrency.lockutils [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2305.432851] env[63024]: DEBUG oslo_concurrency.lockutils [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2305.433093] env[63024]: DEBUG nova.objects.instance [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63024) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2305.462623] env[63024]: DEBUG nova.compute.manager [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Start spawning the instance on the hypervisor. 
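[editor's note] The ReconfigVM_Task / Rename_Task / PowerOnVM_Task entries above all follow the same oslo.vmware pattern: the driver invokes a vCenter task method over SOAP ("Invoking VirtualMachine.<X>_Task"), then wait_for_task() polls it (the repeated "progress is N%" lines at api.py:434) until vCenter reports completion (api.py:444). A minimal standalone sketch of that pattern follows; it assumes a reachable vCenter, and the host, credentials and VM reference are placeholders, not values from this log.

    # Illustrative sketch of the invoke-then-poll pattern; not Nova's code.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test',        # placeholder vCenter host
        'user', 'secret',         # placeholder credentials
        api_retry_count=3,
        task_poll_interval=0.5)   # poll cadence behind the "progress is N%" lines

    vm_ref = ...  # managed object reference of the target VM (placeholder)

    # invoke_api() issues the SOAP call ("Invoking VirtualMachine.PowerOnVM_Task");
    # wait_for_task() polls until the task succeeds or raises on a vCenter error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)
    print(task_info.state)        # 'success' once the task completes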
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2305.489147] env[63024]: DEBUG nova.virt.hardware [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2305.489432] env[63024]: DEBUG nova.virt.hardware [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2305.489603] env[63024]: DEBUG nova.virt.hardware [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2305.489835] env[63024]: DEBUG nova.virt.hardware [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2305.489930] env[63024]: DEBUG nova.virt.hardware [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2305.490098] env[63024]: DEBUG nova.virt.hardware [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2305.490317] env[63024]: DEBUG nova.virt.hardware [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2305.490477] env[63024]: DEBUG nova.virt.hardware [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 2305.490645] env[63024]: DEBUG nova.virt.hardware [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2305.490832] env[63024]: DEBUG nova.virt.hardware [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2305.490977] env[63024]: DEBUG nova.virt.hardware [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2305.491868] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b396fb-13f7-4080-b354-a0969804cd16 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2305.500592] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-152c3a7d-3d57-4f20-b781-0fde7876071d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2305.659920] env[63024]: DEBUG nova.compute.manager [req-6fbf26a3-322f-46f5-82a0-6aabe7104b3e req-dc0ccf0d-b492-4b42-bfe2-804b942b3824 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Received event network-vif-plugged-d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2305.660176] env[63024]: DEBUG oslo_concurrency.lockutils [req-6fbf26a3-322f-46f5-82a0-6aabe7104b3e req-dc0ccf0d-b492-4b42-bfe2-804b942b3824 service nova] Acquiring lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2305.660390] env[63024]: DEBUG oslo_concurrency.lockutils [req-6fbf26a3-322f-46f5-82a0-6aabe7104b3e req-dc0ccf0d-b492-4b42-bfe2-804b942b3824 service nova] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2305.660641] env[63024]: DEBUG oslo_concurrency.lockutils [req-6fbf26a3-322f-46f5-82a0-6aabe7104b3e req-dc0ccf0d-b492-4b42-bfe2-804b942b3824 service nova] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2305.660863] env[63024]: DEBUG nova.compute.manager [req-6fbf26a3-322f-46f5-82a0-6aabe7104b3e req-dc0ccf0d-b492-4b42-bfe2-804b942b3824 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] No waiting events found dispatching network-vif-plugged-d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) pop_instance_event 
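[editor's note] With no CPU topology limits coming from the flavor or image (the "limits 0:0:0" / "pref 0:0:0" lines above, i.e. effective maximums of 65536 sockets/cores/threads), the only constraint is that sockets x cores x threads equals the vCPU count, so a 1-vCPU flavor yields exactly one topology, 1:1:1. A simplified illustration of that enumeration is sketched below; it is not Nova's actual nova.virt.hardware code, which applies further preferences and ordering.

    # Simplified enumeration: every (sockets, cores, threads) whose product is the
    # vCPU count, bounded by the (here effectively unlimited) maximums from the log.
    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        found = []
        for sockets in range(1, min(max_sockets, vcpus) + 1):
            for cores in range(1, min(max_cores, vcpus) + 1):
                for threads in range(1, min(max_threads, vcpus) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))   # [(1, 1, 1)] -- the single topology reported above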
/opt/stack/nova/nova/compute/manager.py:322}} [ 2305.661052] env[63024]: WARNING nova.compute.manager [req-6fbf26a3-322f-46f5-82a0-6aabe7104b3e req-dc0ccf0d-b492-4b42-bfe2-804b942b3824 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Received unexpected event network-vif-plugged-d26f8bfe-a2b6-4905-9785-1c97e45252c0 for instance with vm_state building and task_state spawning. [ 2305.746767] env[63024]: DEBUG nova.network.neutron [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Successfully updated port: d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2305.801776] env[63024]: DEBUG oslo_concurrency.lockutils [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "2e59f840-26bf-4192-b1ee-3645e9a64d1a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2305.801776] env[63024]: DEBUG oslo_concurrency.lockutils [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "2e59f840-26bf-4192-b1ee-3645e9a64d1a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2305.801776] env[63024]: DEBUG oslo_concurrency.lockutils [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "2e59f840-26bf-4192-b1ee-3645e9a64d1a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2305.801776] env[63024]: DEBUG oslo_concurrency.lockutils [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "2e59f840-26bf-4192-b1ee-3645e9a64d1a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2305.801776] env[63024]: DEBUG oslo_concurrency.lockutils [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "2e59f840-26bf-4192-b1ee-3645e9a64d1a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2305.804439] env[63024]: INFO nova.compute.manager [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Terminating instance [ 2306.252366] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock 
"refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2306.256021] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2306.256021] env[63024]: DEBUG nova.network.neutron [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2306.310021] env[63024]: DEBUG oslo_concurrency.lockutils [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "refresh_cache-2e59f840-26bf-4192-b1ee-3645e9a64d1a" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2306.310021] env[63024]: DEBUG oslo_concurrency.lockutils [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquired lock "refresh_cache-2e59f840-26bf-4192-b1ee-3645e9a64d1a" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2306.310021] env[63024]: DEBUG nova.network.neutron [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2306.441038] env[63024]: DEBUG oslo_concurrency.lockutils [None req-582a99b6-8d34-4c77-bb03-09b65796b8cc tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2306.795706] env[63024]: DEBUG nova.network.neutron [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2306.829880] env[63024]: DEBUG nova.network.neutron [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2306.910704] env[63024]: DEBUG nova.network.neutron [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2306.973913] env[63024]: DEBUG nova.network.neutron [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating instance_info_cache with network_info: [{"id": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "address": "fa:16:3e:16:2b:e6", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd26f8bfe-a2", "ovs_interfaceid": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2307.413595] env[63024]: DEBUG oslo_concurrency.lockutils [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Releasing lock "refresh_cache-2e59f840-26bf-4192-b1ee-3645e9a64d1a" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2307.414126] env[63024]: DEBUG nova.compute.manager [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Start destroying the instance on the hypervisor. 
{{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2307.414324] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2307.415280] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91148a7c-c563-49c8-a8f9-74f34b06048a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.424327] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2307.424624] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b71979e1-07a9-4a58-bffe-12a58ccdbca7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.431351] env[63024]: DEBUG oslo_vmware.api [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2307.431351] env[63024]: value = "task-1952055" [ 2307.431351] env[63024]: _type = "Task" [ 2307.431351] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2307.439680] env[63024]: DEBUG oslo_vmware.api [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952055, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2307.477534] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2307.477965] env[63024]: DEBUG nova.compute.manager [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Instance network_info: |[{"id": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "address": "fa:16:3e:16:2b:e6", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd26f8bfe-a2", "ovs_interfaceid": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 2307.478417] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:2b:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55c757ac-f8b2-466d-b634-07dbd100b312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd26f8bfe-a2b6-4905-9785-1c97e45252c0', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2307.487646] env[63024]: DEBUG oslo.service.loopingcall [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2307.487948] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2307.488201] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c86b5951-31a5-40f5-9b9a-1164e7ff9390 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.511694] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2307.511694] env[63024]: value = "task-1952056" [ 2307.511694] env[63024]: _type = "Task" [ 2307.511694] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2307.519695] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952056, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2307.683896] env[63024]: DEBUG nova.compute.manager [req-1ff8b6b9-677b-40b4-86aa-180807aca85d req-08cf0e0b-c4da-43f2-ba3f-cf05e96721b2 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Received event network-changed-d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2307.684094] env[63024]: DEBUG nova.compute.manager [req-1ff8b6b9-677b-40b4-86aa-180807aca85d req-08cf0e0b-c4da-43f2-ba3f-cf05e96721b2 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Refreshing instance network info cache due to event network-changed-d26f8bfe-a2b6-4905-9785-1c97e45252c0. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2307.684406] env[63024]: DEBUG oslo_concurrency.lockutils [req-1ff8b6b9-677b-40b4-86aa-180807aca85d req-08cf0e0b-c4da-43f2-ba3f-cf05e96721b2 service nova] Acquiring lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2307.684459] env[63024]: DEBUG oslo_concurrency.lockutils [req-1ff8b6b9-677b-40b4-86aa-180807aca85d req-08cf0e0b-c4da-43f2-ba3f-cf05e96721b2 service nova] Acquired lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2307.684602] env[63024]: DEBUG nova.network.neutron [req-1ff8b6b9-677b-40b4-86aa-180807aca85d req-08cf0e0b-c4da-43f2-ba3f-cf05e96721b2 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Refreshing network info cache for port d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2307.707249] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2307.707451] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2307.941223] env[63024]: DEBUG oslo_vmware.api [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952055, 'name': PowerOffVM_Task, 'duration_secs': 0.111962} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2307.941500] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2307.941672] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2307.941917] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4fdea0e4-3e47-4b9a-9ba2-1bf27b80c3bc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.965525] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2307.965725] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2307.965907] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Deleting the datastore file [datastore1] 2e59f840-26bf-4192-b1ee-3645e9a64d1a {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2307.966160] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5a5130ef-d03a-4dad-8539-7ca9b2c7deb2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2307.972827] env[63024]: DEBUG oslo_vmware.api [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2307.972827] env[63024]: value = "task-1952058" [ 2307.972827] env[63024]: _type = "Task" [ 2307.972827] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2307.980760] env[63024]: DEBUG oslo_vmware.api [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2308.021674] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952056, 'name': CreateVM_Task, 'duration_secs': 0.367004} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2308.021674] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2308.022428] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2308.022689] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2308.023019] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2308.023334] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8c2f20a-dc0a-4696-83ad-5234895962d5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.028052] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2308.028052] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d4d530-a499-a8d6-f284-91e3703a1c01" [ 2308.028052] env[63024]: _type = "Task" [ 2308.028052] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2308.036022] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d4d530-a499-a8d6-f284-91e3703a1c01, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2308.211908] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.212146] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 2308.382324] env[63024]: DEBUG nova.network.neutron [req-1ff8b6b9-677b-40b4-86aa-180807aca85d req-08cf0e0b-c4da-43f2-ba3f-cf05e96721b2 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updated VIF entry in instance network info cache for port d26f8bfe-a2b6-4905-9785-1c97e45252c0. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2308.382687] env[63024]: DEBUG nova.network.neutron [req-1ff8b6b9-677b-40b4-86aa-180807aca85d req-08cf0e0b-c4da-43f2-ba3f-cf05e96721b2 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating instance_info_cache with network_info: [{"id": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "address": "fa:16:3e:16:2b:e6", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd26f8bfe-a2", "ovs_interfaceid": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2308.482651] env[63024]: DEBUG oslo_vmware.api [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.088334} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2308.482917] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2308.483116] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2308.483301] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2308.483477] env[63024]: INFO nova.compute.manager [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Took 1.07 seconds to destroy the instance on the hypervisor. [ 2308.483730] env[63024]: DEBUG oslo.service.loopingcall [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2308.483910] env[63024]: DEBUG nova.compute.manager [-] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2308.484062] env[63024]: DEBUG nova.network.neutron [-] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2308.499057] env[63024]: DEBUG nova.network.neutron [-] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2308.539207] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52d4d530-a499-a8d6-f284-91e3703a1c01, 'name': SearchDatastore_Task, 'duration_secs': 0.008891} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2308.539744] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2308.539984] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2308.540233] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2308.540385] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2308.540561] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2308.540809] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2979a050-d7c8-4f58-9b35-3f42f33f4165 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.548888] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2308.549083] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Folder [datastore1] devstack-image-cache_base created. 
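[editor's note] The Acquiring/Acquired/Released lines throughout this excerpt ("compute_resources", "refresh_cache-<uuid>", the "[datastore1] devstack-image-cache_base/..." path above) come from oslo.concurrency's lockutils helpers, which log wait and hold durations at debug level. A minimal sketch of the two usage styles follows, with illustrative lock names rather than Nova's real call sites.

    from oslo_concurrency import lockutils

    # Decorator style: the wrapped function runs with the named lock held, and the
    # "acquired ... waited" / "released ... held" debug lines are emitted around it.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def claim_resources():
        pass

    # Context-manager style, as used for the per-instance refresh_cache locks above;
    # this form produces the "Acquiring lock" / "Acquired lock" / "Releasing lock" lines.
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass

    claim_resources()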
{{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2308.549756] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d4d612a7-3d13-4989-b534-74a9aff4e53b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2308.554574] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2308.554574] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5298d8b4-0449-0a01-5315-a6d54f6359d8" [ 2308.554574] env[63024]: _type = "Task" [ 2308.554574] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2308.561407] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5298d8b4-0449-0a01-5315-a6d54f6359d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2308.714411] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Didn't find any instances for network info cache update. {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10344}} [ 2308.714675] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.714844] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.714993] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.715158] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.715302] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.715447] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.715578] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, 
skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 2308.715717] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2308.885612] env[63024]: DEBUG oslo_concurrency.lockutils [req-1ff8b6b9-677b-40b4-86aa-180807aca85d req-08cf0e0b-c4da-43f2-ba3f-cf05e96721b2 service nova] Releasing lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2309.001818] env[63024]: DEBUG nova.network.neutron [-] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2309.065047] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5298d8b4-0449-0a01-5315-a6d54f6359d8, 'name': SearchDatastore_Task, 'duration_secs': 0.007949} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2309.065509] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-645b17a2-7a70-41ec-9543-99f62dcf6e76 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.070439] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2309.070439] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52de9c18-2337-31d8-c22b-3846c1548160" [ 2309.070439] env[63024]: _type = "Task" [ 2309.070439] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2309.077723] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52de9c18-2337-31d8-c22b-3846c1548160, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2309.218913] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2309.219181] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2309.219363] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2309.219523] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2309.220406] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3247c9e-2c4f-4c56-9ccf-82b2b65cdd8a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.228662] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525c836e-7b40-45e9-bd07-e3b307dadd84 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.241885] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9306398f-d791-4d39-afdc-7165fc79f0c7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.247985] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f28ae1f-f48d-405a-b4ad-350c107e7a26 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.277660] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180538MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2309.277806] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2309.277983] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2309.506540] env[63024]: INFO nova.compute.manager [-] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Took 1.02 seconds to deallocate network for instance. [ 2309.580798] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52de9c18-2337-31d8-c22b-3846c1548160, 'name': SearchDatastore_Task, 'duration_secs': 0.008903} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2309.581037] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2309.581300] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] fc828ce0-b08d-41f4-afb5-ea8968bbf62e/fc828ce0-b08d-41f4-afb5-ea8968bbf62e.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2309.581553] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b0356dc9-1efd-46ae-ae60-1b3b7cc50316 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2309.587942] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2309.587942] env[63024]: value = "task-1952059" [ 2309.587942] env[63024]: _type = "Task" [ 2309.587942] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2309.595529] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952059, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2310.014060] env[63024]: DEBUG oslo_concurrency.lockutils [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2310.097569] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952059, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.418787} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2310.097882] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] fc828ce0-b08d-41f4-afb5-ea8968bbf62e/fc828ce0-b08d-41f4-afb5-ea8968bbf62e.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2310.098115] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2310.098409] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-54d92528-d9e3-471a-ba15-a2b26a981443 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.105120] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2310.105120] env[63024]: value = "task-1952060" [ 2310.105120] env[63024]: _type = "Task" [ 2310.105120] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2310.112705] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952060, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2310.307573] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance e79d7141-00e5-40c9-a88f-244a3ae685d8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2310.307742] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance 2e59f840-26bf-4192-b1ee-3645e9a64d1a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2310.307866] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance fc828ce0-b08d-41f4-afb5-ea8968bbf62e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2310.308068] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2310.308211] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2310.359719] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ce1e5db-1b46-404a-b4c0-eac41dc9489a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.367739] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae4173ee-142c-429a-9d56-e4c3a17d7d08 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.397822] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e90ddb2-7ac1-4c72-ad8b-f2ecdea9808a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.405302] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45bac4f5-6f47-45c1-a895-c11b83fc98dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.420231] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2310.615513] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952060, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.219572} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2310.615907] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2310.616785] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95184c40-97d3-4e94-8efc-9ee39aac0c1b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.638853] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] fc828ce0-b08d-41f4-afb5-ea8968bbf62e/fc828ce0-b08d-41f4-afb5-ea8968bbf62e.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2310.639139] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a352334-4ab5-40c2-9688-66be229c0ae3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2310.658263] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2310.658263] env[63024]: value = "task-1952061" [ 2310.658263] env[63024]: _type = "Task" [ 2310.658263] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2310.666105] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952061, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2310.923402] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2311.169204] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952061, 'name': ReconfigVM_Task, 'duration_secs': 0.26343} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2311.169478] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Reconfigured VM instance instance-0000007d to attach disk [datastore1] fc828ce0-b08d-41f4-afb5-ea8968bbf62e/fc828ce0-b08d-41f4-afb5-ea8968bbf62e.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2311.170102] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ab54e87b-5dd0-4ee9-95cf-c0a854900497 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.177121] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2311.177121] env[63024]: value = "task-1952062" [ 2311.177121] env[63024]: _type = "Task" [ 2311.177121] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.184253] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952062, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.427963] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2311.428205] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.150s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2311.428487] env[63024]: DEBUG oslo_concurrency.lockutils [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.415s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2311.428718] env[63024]: DEBUG nova.objects.instance [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lazy-loading 'resources' on Instance uuid 2e59f840-26bf-4192-b1ee-3645e9a64d1a {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2311.686455] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952062, 'name': Rename_Task, 'duration_secs': 0.136438} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2311.686733] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2311.686951] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5cfdd82-6193-4f68-9104-1ba3a803b25d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.693276] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2311.693276] env[63024]: value = "task-1952063" [ 2311.693276] env[63024]: _type = "Task" [ 2311.693276] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2311.703496] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952063, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2311.975957] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d8c8a89-9f61-4af7-88c5-9625f073b473 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2311.983718] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294589bb-093a-4a95-b8f0-528a0e9c449f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.014268] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a2d145-1284-41d1-bcd8-2ae1a8f419d0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.022027] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fa30b6-da2c-47b6-9e71-57dc34c0148c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.035383] env[63024]: DEBUG nova.compute.provider_tree [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2312.203386] env[63024]: DEBUG oslo_vmware.api [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952063, 'name': PowerOnVM_Task, 'duration_secs': 0.413865} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2312.203602] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2312.203809] env[63024]: INFO nova.compute.manager [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Took 6.74 seconds to spawn the instance on the hypervisor. [ 2312.203987] env[63024]: DEBUG nova.compute.manager [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2312.204754] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-918338b5-f03e-4aa6-9840-095109589922 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2312.539098] env[63024]: DEBUG nova.scheduler.client.report [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2312.723642] env[63024]: INFO nova.compute.manager [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Took 12.45 seconds to build instance. 
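
Editor's note: the spawn of instance fc828ce0-b08d-41f4-afb5-ea8968bbf62e above repeats one pattern for every step (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task): invoke a vCenter task through the session, then block in wait_for_task while _poll_task logs "progress is 0%" until the task completes. The sketch below is a minimal illustration of that invoke-then-wait pattern with oslo.vmware, assuming an already-established oslo_vmware.api.VMwareAPISession and pre-resolved managed-object references (disk_mgr, datacenter and vm_ref are placeholders); it is not the nova.virt.vmwareapi code that emitted these entries.

def copy_disk_and_power_on(session, disk_mgr, datacenter,
                           src_vmdk, dst_vmdk, vm_ref):
    # Start the server-side disk copy; disk_mgr would typically be the
    # vCenter VirtualDiskManager managed object.  invoke_api returns a
    # Task moref (e.g. task-1952059 in the log above).
    copy_task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task', disk_mgr,
        sourceName=src_vmdk, sourceDatacenter=datacenter,
        destName=dst_vmdk, destDatacenter=datacenter)
    # wait_for_task polls the task state (the "_poll_task ... progress is
    # 0% ... completed successfully" lines) and raises if vCenter reports
    # the task as failed.
    session.wait_for_task(copy_task)

    # Powering the VM on uses exactly the same invoke-then-wait pattern,
    # just against the VM managed object instead of the disk manager.
    power_task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(power_task)

Every task in the sequence above (task-1952059 through task-1952063) is driven through this same invoke/wait loop; only the vSphere method name and its arguments change from step to step.
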
[ 2313.045067] env[63024]: DEBUG oslo_concurrency.lockutils [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.616s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2313.067318] env[63024]: INFO nova.scheduler.client.report [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Deleted allocations for instance 2e59f840-26bf-4192-b1ee-3645e9a64d1a [ 2313.226623] env[63024]: DEBUG oslo_concurrency.lockutils [None req-18f09fb7-4916-44fd-ac63-f8120542e6d8 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.967s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2313.575333] env[63024]: DEBUG oslo_concurrency.lockutils [None req-142f5d35-7806-4d91-828c-559c959e28f5 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "2e59f840-26bf-4192-b1ee-3645e9a64d1a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.774s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2313.636618] env[63024]: DEBUG nova.compute.manager [req-98908356-d4e1-4cbb-9f40-24b6a42d4798 req-e973a8d2-5e9a-4327-8a85-3494a53cda13 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Received event network-changed-d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2313.636827] env[63024]: DEBUG nova.compute.manager [req-98908356-d4e1-4cbb-9f40-24b6a42d4798 req-e973a8d2-5e9a-4327-8a85-3494a53cda13 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Refreshing instance network info cache due to event network-changed-d26f8bfe-a2b6-4905-9785-1c97e45252c0. 
{{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2313.637061] env[63024]: DEBUG oslo_concurrency.lockutils [req-98908356-d4e1-4cbb-9f40-24b6a42d4798 req-e973a8d2-5e9a-4327-8a85-3494a53cda13 service nova] Acquiring lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2313.637206] env[63024]: DEBUG oslo_concurrency.lockutils [req-98908356-d4e1-4cbb-9f40-24b6a42d4798 req-e973a8d2-5e9a-4327-8a85-3494a53cda13 service nova] Acquired lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2313.637368] env[63024]: DEBUG nova.network.neutron [req-98908356-d4e1-4cbb-9f40-24b6a42d4798 req-e973a8d2-5e9a-4327-8a85-3494a53cda13 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Refreshing network info cache for port d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2314.341665] env[63024]: DEBUG nova.network.neutron [req-98908356-d4e1-4cbb-9f40-24b6a42d4798 req-e973a8d2-5e9a-4327-8a85-3494a53cda13 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updated VIF entry in instance network info cache for port d26f8bfe-a2b6-4905-9785-1c97e45252c0. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2314.342051] env[63024]: DEBUG nova.network.neutron [req-98908356-d4e1-4cbb-9f40-24b6a42d4798 req-e973a8d2-5e9a-4327-8a85-3494a53cda13 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating instance_info_cache with network_info: [{"id": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "address": "fa:16:3e:16:2b:e6", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd26f8bfe-a2", "ovs_interfaceid": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2314.452036] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "e79d7141-00e5-40c9-a88f-244a3ae685d8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2314.452292] 
env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "e79d7141-00e5-40c9-a88f-244a3ae685d8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2314.452514] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "e79d7141-00e5-40c9-a88f-244a3ae685d8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2314.452743] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "e79d7141-00e5-40c9-a88f-244a3ae685d8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2314.452897] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "e79d7141-00e5-40c9-a88f-244a3ae685d8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2314.455504] env[63024]: INFO nova.compute.manager [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Terminating instance [ 2314.845168] env[63024]: DEBUG oslo_concurrency.lockutils [req-98908356-d4e1-4cbb-9f40-24b6a42d4798 req-e973a8d2-5e9a-4327-8a85-3494a53cda13 service nova] Releasing lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2314.959296] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "refresh_cache-e79d7141-00e5-40c9-a88f-244a3ae685d8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2314.959483] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquired lock "refresh_cache-e79d7141-00e5-40c9-a88f-244a3ae685d8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2314.959671] env[63024]: DEBUG nova.network.neutron [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2315.477429] env[63024]: DEBUG 
nova.network.neutron [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2315.529301] env[63024]: DEBUG nova.network.neutron [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2316.032042] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Releasing lock "refresh_cache-e79d7141-00e5-40c9-a88f-244a3ae685d8" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2316.032467] env[63024]: DEBUG nova.compute.manager [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2316.032671] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2316.033596] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5519fb4b-1ae2-4a10-81ec-06929ad0574c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.041471] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2316.041689] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79c8a218-4f10-400f-b955-863039dcbb49 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.047571] env[63024]: DEBUG oslo_vmware.api [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2316.047571] env[63024]: value = "task-1952064" [ 2316.047571] env[63024]: _type = "Task" [ 2316.047571] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2316.055400] env[63024]: DEBUG oslo_vmware.api [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952064, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2316.557109] env[63024]: DEBUG oslo_vmware.api [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952064, 'name': PowerOffVM_Task, 'duration_secs': 0.115157} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2316.557513] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2316.557627] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2316.557917] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ef079917-e59e-42e4-89cf-a67c9b13bdbf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.581891] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2316.582165] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2316.582408] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Deleting the datastore file [datastore1] e79d7141-00e5-40c9-a88f-244a3ae685d8 {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2316.582659] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-649e442d-2440-4e2e-9ddc-5945f77183d4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2316.589355] env[63024]: DEBUG oslo_vmware.api [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for the task: (returnval){ [ 2316.589355] env[63024]: value = "task-1952066" [ 2316.589355] env[63024]: _type = "Task" [ 2316.589355] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2316.596538] env[63024]: DEBUG oslo_vmware.api [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952066, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2317.099141] env[63024]: DEBUG oslo_vmware.api [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Task: {'id': task-1952066, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091652} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2317.099403] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2317.099592] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2317.099769] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2317.099944] env[63024]: INFO nova.compute.manager [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Took 1.07 seconds to destroy the instance on the hypervisor. [ 2317.100199] env[63024]: DEBUG oslo.service.loopingcall [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2317.100389] env[63024]: DEBUG nova.compute.manager [-] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2317.100486] env[63024]: DEBUG nova.network.neutron [-] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2317.114808] env[63024]: DEBUG nova.network.neutron [-] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2317.616938] env[63024]: DEBUG nova.network.neutron [-] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2318.119696] env[63024]: INFO nova.compute.manager [-] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Took 1.02 seconds to deallocate network for instance. 
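
Editor's note: nearly every step in this section is bracketed by the same three messages, Acquiring lock "..." by "...", Lock "..." acquired ... waited Ns, and Lock "..." "released" ... held Ns, all emitted from the inner wrapper in oslo_concurrency/lockutils.py (the lockutils.py:402/407/421 trailers). The sketch below shows the decorator pattern that produces them; the ToyResourceTracker class and its method body are illustrative stand-ins, not Nova's resource tracker.

from oslo_concurrency import lockutils

COMPUTE_RESOURCE_SEMAPHORE = 'compute_resources'


class ToyResourceTracker(object):

    @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
    def update_usage(self, instance_uuid, mem_mb, vcpus):
        # Everything inside runs with the process-local "compute_resources"
        # semaphore held, so concurrent claims and usage updates serialize.
        # The decorator's wrapper logs how long the caller waited for the
        # lock and, on exit, how long it was held -- those are the
        # waited/held timings seen in the log entries above.
        print('usage for %s: %dMB RAM, %d vCPU'
              % (instance_uuid, mem_mb, vcpus))


tracker = ToyResourceTracker()
tracker.update_usage('e79d7141-00e5-40c9-a88f-244a3ae685d8', 192, 1)

The per-instance locks ("e79d7141-...", "fc828ce0-...") and the image-cache locks later in this section follow the same mechanism, only with a different lock name.
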
[ 2318.626798] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2318.627176] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2318.627306] env[63024]: DEBUG nova.objects.instance [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lazy-loading 'resources' on Instance uuid e79d7141-00e5-40c9-a88f-244a3ae685d8 {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2319.173099] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfad345-7b0e-4017-ae40-62bb01b27185 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.180542] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10fad434-ae39-42fb-ad50-b8c4bd000aae {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.209936] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36790ca3-c78d-4ae4-ab52-f5ac0c6f7e14 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.216954] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0795ebfe-9296-4c35-99e9-c55aa197244b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2319.229670] env[63024]: DEBUG nova.compute.provider_tree [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2319.733030] env[63024]: DEBUG nova.scheduler.client.report [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2320.238666] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 
tempest-ServerShowV247Test-235399518-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.611s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2320.256667] env[63024]: INFO nova.scheduler.client.report [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Deleted allocations for instance e79d7141-00e5-40c9-a88f-244a3ae685d8 [ 2320.763850] env[63024]: DEBUG oslo_concurrency.lockutils [None req-e6c0b63c-6d5d-435e-a338-c00ba303ffc1 tempest-ServerShowV247Test-235399518 tempest-ServerShowV247Test-235399518-project-member] Lock "e79d7141-00e5-40c9-a88f-244a3ae685d8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.311s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2321.887007] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquiring lock "9b3a4e94-a40b-4498-8aae-f72cc9acdeef" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2321.887319] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Lock "9b3a4e94-a40b-4498-8aae-f72cc9acdeef" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2322.390571] env[63024]: DEBUG nova.compute.manager [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Starting instance... 
{{(pid=63024) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 2322.909708] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2322.909972] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2322.911417] env[63024]: INFO nova.compute.claims [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2323.956023] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-947afcae-ab25-4ed5-83e8-06310b883032 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.962369] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dbafdf6-b556-427f-a825-dd16c8d289fe {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.991324] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53aa57f-abfc-441c-8feb-6f29542bec3d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2323.998259] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f717d538-843d-44f1-b961-88c036226cce {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2324.012304] env[63024]: DEBUG nova.compute.provider_tree [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2324.515268] env[63024]: DEBUG nova.scheduler.client.report [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2325.020744] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 
tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.111s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2325.021332] env[63024]: DEBUG nova.compute.manager [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Start building networks asynchronously for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 2325.526643] env[63024]: DEBUG nova.compute.utils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Using /dev/sd instead of None {{(pid=63024) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2325.528131] env[63024]: DEBUG nova.compute.manager [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Not allocating networking since 'none' was specified. {{(pid=63024) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 2326.029624] env[63024]: DEBUG nova.compute.manager [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Start building block device mappings for instance. {{(pid=63024) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 2327.039014] env[63024]: DEBUG nova.compute.manager [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Start spawning the instance on the hypervisor. 
{{(pid=63024) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 2327.069574] env[63024]: DEBUG nova.virt.hardware [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2327.069824] env[63024]: DEBUG nova.virt.hardware [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2327.069984] env[63024]: DEBUG nova.virt.hardware [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2327.070185] env[63024]: DEBUG nova.virt.hardware [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2327.070337] env[63024]: DEBUG nova.virt.hardware [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2327.070485] env[63024]: DEBUG nova.virt.hardware [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2327.070693] env[63024]: DEBUG nova.virt.hardware [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2327.070854] env[63024]: DEBUG nova.virt.hardware [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2327.071168] env[63024]: DEBUG nova.virt.hardware [None req-7affecac-8250-488e-9bc6-311801c33e86 
tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2327.071351] env[63024]: DEBUG nova.virt.hardware [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2327.071530] env[63024]: DEBUG nova.virt.hardware [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2327.072393] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2840dd65-90ad-4e9f-abc3-defcf8b3e978 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.080633] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41916b7-46c0-4c5d-b90c-18e2847fac90 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.093561] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Instance VIF info [] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2327.098852] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Creating folder: Project (c578538cde654bb7bc229542b86e1297). Parent ref: group-v401959. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2327.099112] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77635357-3d37-461d-95b5-8dd0c27e097f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.109477] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Created folder: Project (c578538cde654bb7bc229542b86e1297) in parent group-v401959. [ 2327.109656] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Creating folder: Instances. Parent ref: group-v402290. {{(pid=63024) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 2327.109858] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4976dee-d274-47e8-be49-9293995f35b2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.118923] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Created folder: Instances in parent group-v402290. 
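
Editor's note: the "Getting desirable topologies ... Build topologies for 1 vcpu(s) 1:1:1 ... Got 1 possible topologies" entries a few lines above describe enumerating the sockets:cores:threads factorizations of the flavor's vCPU count within the maximum limits (65536 each here, since neither the m1.nano flavor nor the image constrains them). The toy enumeration below illustrates that idea only; it is not nova.virt.hardware's implementation, and VirtCPUTopology is redefined locally for the example.

import collections

VirtCPUTopology = collections.namedtuple(
    'VirtCPUTopology', ['sockets', 'cores', 'threads'])


def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
    # Enumerate every sockets * cores * threads factorization of the vCPU
    # count that stays within the per-dimension maximums.
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies


# vcpus=1 (the m1.nano flavor above) factors only as 1 socket x 1 core x
# 1 thread, matching the single topology reported in the log.
print(possible_topologies(1, 65536, 65536, 65536))
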
[ 2327.119158] env[63024]: DEBUG oslo.service.loopingcall [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2327.119337] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2327.119520] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-63fdc0a0-0b73-4a80-86cd-069bd3998ece {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.135191] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2327.135191] env[63024]: value = "task-1952069" [ 2327.135191] env[63024]: _type = "Task" [ 2327.135191] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2327.143427] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952069, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2327.645668] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952069, 'name': CreateVM_Task, 'duration_secs': 0.250277} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2327.645844] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2327.646282] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2327.646446] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2327.646796] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2327.647051] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-010f3e83-8a13-4d27-894d-c07264bbf513 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2327.651359] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: 
(returnval){ [ 2327.651359] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a1e335-2c6b-0bad-88ac-c34a0c00461e" [ 2327.651359] env[63024]: _type = "Task" [ 2327.651359] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2327.658634] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a1e335-2c6b-0bad-88ac-c34a0c00461e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.160977] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a1e335-2c6b-0bad-88ac-c34a0c00461e, 'name': SearchDatastore_Task, 'duration_secs': 0.009743} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2328.161379] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2328.161501] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2328.161730] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2328.161885] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2328.162083] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2328.162342] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e2e75297-62ce-468d-819f-098ef197252d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.170252] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2328.170416] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2328.171139] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b403f693-0aa2-47c2-971a-53cdbd455d68 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.177216] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2328.177216] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a212f5-8aaf-1902-25fe-ebc10a8c842e" [ 2328.177216] env[63024]: _type = "Task" [ 2328.177216] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.184207] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a212f5-8aaf-1902-25fe-ebc10a8c842e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2328.687771] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52a212f5-8aaf-1902-25fe-ebc10a8c842e, 'name': SearchDatastore_Task, 'duration_secs': 0.007778} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2328.688544] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20d3bd3e-0e9a-405e-bf0a-8025064584c8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2328.693425] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2328.693425] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52875d29-21d7-b50d-2824-35725aca12c4" [ 2328.693425] env[63024]: _type = "Task" [ 2328.693425] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2328.700516] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52875d29-21d7-b50d-2824-35725aca12c4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.203341] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52875d29-21d7-b50d-2824-35725aca12c4, 'name': SearchDatastore_Task, 'duration_secs': 0.009113} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2329.203686] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2329.203823] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9b3a4e94-a40b-4498-8aae-f72cc9acdeef/9b3a4e94-a40b-4498-8aae-f72cc9acdeef.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2329.204076] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e2effb4b-5105-4c0c-99aa-956fd003e7fa {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.210919] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2329.210919] env[63024]: value = "task-1952070" [ 2329.210919] env[63024]: _type = "Task" [ 2329.210919] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.217955] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952070, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2329.721195] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952070, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.42676} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2329.721462] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9b3a4e94-a40b-4498-8aae-f72cc9acdeef/9b3a4e94-a40b-4498-8aae-f72cc9acdeef.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2329.721683] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2329.721929] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-43ea8225-4977-4bcd-be84-496d41ae8997 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2329.729331] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2329.729331] env[63024]: value = "task-1952071" [ 2329.729331] env[63024]: _type = "Task" [ 2329.729331] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2329.736457] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952071, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.239068] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952071, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.055312} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2330.239393] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2330.240071] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ba69d18-087a-4437-8efc-02f132631c05 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.259168] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] 9b3a4e94-a40b-4498-8aae-f72cc9acdeef/9b3a4e94-a40b-4498-8aae-f72cc9acdeef.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2330.259393] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4db39469-7c1d-4595-9378-65b29bd8fca8 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2330.278422] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2330.278422] env[63024]: value = "task-1952072" [ 2330.278422] env[63024]: _type = "Task" [ 2330.278422] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2330.286443] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952072, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2330.787417] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.288114] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952072, 'name': ReconfigVM_Task, 'duration_secs': 0.776533} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2331.288488] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Reconfigured VM instance instance-0000007e to attach disk [datastore1] 9b3a4e94-a40b-4498-8aae-f72cc9acdeef/9b3a4e94-a40b-4498-8aae-f72cc9acdeef.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2331.288939] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2631931-7b49-4c70-9708-a4c3e87c2d1d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.295484] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2331.295484] env[63024]: value = "task-1952073" [ 2331.295484] env[63024]: _type = "Task" [ 2331.295484] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.302499] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952073, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2331.805166] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952073, 'name': Rename_Task, 'duration_secs': 0.120366} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2331.805431] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2331.805666] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1ca7e4c3-a210-48a5-97ed-e2aaee7fcb2c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2331.811621] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2331.811621] env[63024]: value = "task-1952074" [ 2331.811621] env[63024]: _type = "Task" [ 2331.811621] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2331.818669] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952074, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2332.321261] env[63024]: DEBUG oslo_vmware.api [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952074, 'name': PowerOnVM_Task, 'duration_secs': 0.395729} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2332.321623] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2332.321753] env[63024]: INFO nova.compute.manager [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Took 5.28 seconds to spawn the instance on the hypervisor. [ 2332.321889] env[63024]: DEBUG nova.compute.manager [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2332.322645] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5b1221-6da0-4591-9c83-eff217812f07 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2332.842530] env[63024]: INFO nova.compute.manager [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Took 9.95 seconds to build instance. 
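Note: the spawn traced above is driven by a chain of vCenter tasks (CreateVM_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each polled until it reports completion. The sketch below illustrates the generic polling pattern behind the repeated "progress is N%" / "completed successfully" lines, assuming a simple callable that returns a dict with 'state', 'progress' and 'message' keys; the real loop is oslo_vmware.api.VMwareAPISession.wait_for_task and differs in detail (it uses oslo.service looping calls and vSphere task info objects).

```python
import time

class TaskFailed(Exception):
    """Raised when a polled task ends in error or times out (sketch only)."""

def wait_for_task(poll_task, interval=0.5, timeout=300):
    """Poll a vCenter-style task until it succeeds, fails, or times out.

    ``poll_task`` is assumed to return dicts such as
    {'state': 'running', 'progress': 40}, {'state': 'success'} or
    {'state': 'error', 'message': '...'}.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = poll_task()
        state = info.get("state")
        if state == "success":
            return info
        if state == "error":
            raise TaskFailed(info.get("message", "task failed"))
        # Task still queued or running: report progress and retry, much like
        # the "_poll_task ... progress is N%" entries in the log above.
        print("progress is %s%%" % info.get("progress", 0))
        time.sleep(interval)
    raise TaskFailed("timed out waiting for task")
```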
[ 2333.344705] env[63024]: DEBUG oslo_concurrency.lockutils [None req-7affecac-8250-488e-9bc6-311801c33e86 tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Lock "9b3a4e94-a40b-4498-8aae-f72cc9acdeef" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.457s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2333.625054] env[63024]: INFO nova.compute.manager [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Rebuilding instance [ 2333.664190] env[63024]: DEBUG nova.compute.manager [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2333.665117] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43c32aec-f4be-4741-a93a-73ddf54419df {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.677501] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2334.677941] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e0d3dfd-bc74-4c3a-8c73-b74b35e8b208 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.686022] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2334.686022] env[63024]: value = "task-1952075" [ 2334.686022] env[63024]: _type = "Task" [ 2334.686022] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.694398] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952075, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2335.195789] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952075, 'name': PowerOffVM_Task, 'duration_secs': 0.204733} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2335.196069] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2335.196306] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2335.197119] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d87ba1d-8145-49cc-b780-7bc772e9c370 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.203636] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2335.203868] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-96b5df58-c9d6-4c9f-a0a9-0cb35e68d9c4 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.231014] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2335.231300] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2335.231425] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Deleting the datastore file [datastore1] 9b3a4e94-a40b-4498-8aae-f72cc9acdeef {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2335.231668] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b986b9b4-037f-4745-bbcd-22640c2728e5 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.238512] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2335.238512] env[63024]: value = "task-1952077" [ 2335.238512] env[63024]: _type = "Task" [ 2335.238512] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2335.245716] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952077, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2335.748757] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952077, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.084554} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2335.749128] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2335.749167] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2335.749334] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2336.777729] env[63024]: DEBUG nova.virt.hardware [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-12-22T10:49:24Z,direct_url=,disk_format='vmdk',id=2646ca61-612e-4bc3-97f7-ee492c048835,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='d78401abb63840f4b461856cfdb6dbbb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-12-22T10:49:25Z,virtual_size=,visibility=), allow threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2336.778041] env[63024]: DEBUG nova.virt.hardware [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2336.778180] env[63024]: DEBUG nova.virt.hardware [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 2336.778364] env[63024]: DEBUG nova.virt.hardware [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2336.778510] env[63024]: DEBUG nova.virt.hardware [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2336.778655] env[63024]: DEBUG nova.virt.hardware [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2336.778858] env[63024]: DEBUG nova.virt.hardware [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2336.779047] env[63024]: DEBUG nova.virt.hardware [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2336.779234] env[63024]: DEBUG nova.virt.hardware [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2336.779400] env[63024]: DEBUG nova.virt.hardware [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2336.779573] env[63024]: DEBUG nova.virt.hardware [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2336.780423] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8187fcd0-d6f8-4eef-a26d-097afe275ccd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.788398] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3336c8-ead4-41f1-b6ab-a0e2bf9bf004 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.801811] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Instance VIF info [] 
{{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2336.807241] env[63024]: DEBUG oslo.service.loopingcall [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2336.807470] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2336.807665] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-307fe86e-b298-4b75-b33b-d5ad2a875978 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2336.824414] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2336.824414] env[63024]: value = "task-1952078" [ 2336.824414] env[63024]: _type = "Task" [ 2336.824414] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2336.831460] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952078, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2337.334322] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952078, 'name': CreateVM_Task, 'duration_secs': 0.24202} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2337.334500] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2337.334924] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2337.335105] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2337.335429] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2337.335673] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b16ea24b-ef83-4477-9f0c-381826a533e6 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.340024] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde 
tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2337.340024] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523bebab-df0f-70eb-9d7c-3f0a9174ba95" [ 2337.340024] env[63024]: _type = "Task" [ 2337.340024] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2337.347402] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523bebab-df0f-70eb-9d7c-3f0a9174ba95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2337.850048] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523bebab-df0f-70eb-9d7c-3f0a9174ba95, 'name': SearchDatastore_Task, 'duration_secs': 0.009105} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2337.850431] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2337.850580] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Processing image 2646ca61-612e-4bc3-97f7-ee492c048835 {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2337.850806] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2337.850955] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquired lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2337.851151] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2337.851407] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-88152fca-b288-4b1b-bff0-49fda618925d {{(pid=63024) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.858895] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2337.859083] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2337.859721] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c753ff63-1346-4da0-bb05-368d993066ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2337.864396] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2337.864396] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522b428f-0378-c86b-10e2-fc09b8158023" [ 2337.864396] env[63024]: _type = "Task" [ 2337.864396] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2337.871177] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522b428f-0378-c86b-10e2-fc09b8158023, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2338.375021] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]522b428f-0378-c86b-10e2-fc09b8158023, 'name': SearchDatastore_Task, 'duration_secs': 0.007983} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2338.375749] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ebab05a-c708-452b-a8b8-2ebcd8b7a40a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.380674] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2338.380674] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526e8677-3ecf-e38d-a03d-66c00d783cf6" [ 2338.380674] env[63024]: _type = "Task" [ 2338.380674] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2338.387889] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526e8677-3ecf-e38d-a03d-66c00d783cf6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2338.890647] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]526e8677-3ecf-e38d-a03d-66c00d783cf6, 'name': SearchDatastore_Task, 'duration_secs': 0.009253} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2338.890924] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Releasing lock "[datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2338.891203] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9b3a4e94-a40b-4498-8aae-f72cc9acdeef/9b3a4e94-a40b-4498-8aae-f72cc9acdeef.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2338.891463] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf795879-b49c-4a00-a1ad-1fa7514491ab {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2338.898181] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2338.898181] env[63024]: value = "task-1952079" [ 2338.898181] env[63024]: _type = "Task" [ 2338.898181] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2338.905359] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952079, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2339.408286] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952079, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.430562} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2339.408569] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/2646ca61-612e-4bc3-97f7-ee492c048835/2646ca61-612e-4bc3-97f7-ee492c048835.vmdk to [datastore1] 9b3a4e94-a40b-4498-8aae-f72cc9acdeef/9b3a4e94-a40b-4498-8aae-f72cc9acdeef.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2339.408789] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Extending root virtual disk to 1048576 {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 2339.409055] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-421617ba-d9d9-4ff0-be7a-961952a3706f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.416767] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2339.416767] env[63024]: value = "task-1952080" [ 2339.416767] env[63024]: _type = "Task" [ 2339.416767] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2339.424029] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952080, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2339.926521] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952080, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069175} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2339.926840] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Extended root virtual disk {{(pid=63024) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 2339.927568] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c43871-38db-4a16-b30e-17ad84702933 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.946379] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] 9b3a4e94-a40b-4498-8aae-f72cc9acdeef/9b3a4e94-a40b-4498-8aae-f72cc9acdeef.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2339.946613] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bc87ea7-f33b-4316-a8f3-b12e4fe0d75b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2339.965667] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2339.965667] env[63024]: value = "task-1952081" [ 2339.965667] env[63024]: _type = "Task" [ 2339.965667] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2339.975901] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952081, 'name': ReconfigVM_Task} progress is 6%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2340.475141] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952081, 'name': ReconfigVM_Task, 'duration_secs': 0.263696} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2340.475431] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Reconfigured VM instance instance-0000007e to attach disk [datastore1] 9b3a4e94-a40b-4498-8aae-f72cc9acdeef/9b3a4e94-a40b-4498-8aae-f72cc9acdeef.vmdk or device None with type sparse {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2340.476066] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eead1ead-3d99-40ee-b6c8-d530ccae334d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.482649] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2340.482649] env[63024]: value = "task-1952082" [ 2340.482649] env[63024]: _type = "Task" [ 2340.482649] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2340.490255] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952082, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2340.992868] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952082, 'name': Rename_Task, 'duration_secs': 0.1215} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2340.993247] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2340.993396] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cb5eba2b-d2be-4f12-a558-166f2137971d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2340.999406] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2340.999406] env[63024]: value = "task-1952083" [ 2340.999406] env[63024]: _type = "Task" [ 2340.999406] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2341.006507] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952083, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2341.508841] env[63024]: DEBUG oslo_vmware.api [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952083, 'name': PowerOnVM_Task, 'duration_secs': 0.37972} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2341.509125] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2341.509338] env[63024]: DEBUG nova.compute.manager [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2341.510081] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2abde9b7-9cf7-4c02-b126-a2bdeb2217bd {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2342.026832] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.027123] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2342.027310] env[63024]: DEBUG nova.objects.instance [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Trying to apply a migration context that does not seem to be set for this instance {{(pid=63024) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 2342.200313] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquiring lock "9b3a4e94-a40b-4498-8aae-f72cc9acdeef" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.200580] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Lock "9b3a4e94-a40b-4498-8aae-f72cc9acdeef" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2342.200794] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquiring lock "9b3a4e94-a40b-4498-8aae-f72cc9acdeef-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2342.200981] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Lock "9b3a4e94-a40b-4498-8aae-f72cc9acdeef-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2342.201172] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Lock "9b3a4e94-a40b-4498-8aae-f72cc9acdeef-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2342.203417] env[63024]: INFO nova.compute.manager [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Terminating instance [ 2342.706417] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquiring lock "refresh_cache-9b3a4e94-a40b-4498-8aae-f72cc9acdeef" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2342.706566] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquired lock "refresh_cache-9b3a4e94-a40b-4498-8aae-f72cc9acdeef" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2342.706762] env[63024]: DEBUG nova.network.neutron [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2343.035524] env[63024]: DEBUG oslo_concurrency.lockutils [None req-9b0a1e64-bfb7-4850-8c8a-d55b7862abde tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2343.225540] env[63024]: DEBUG nova.network.neutron [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Instance cache missing network info. 
{{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2343.285822] env[63024]: DEBUG nova.network.neutron [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2343.788416] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Releasing lock "refresh_cache-9b3a4e94-a40b-4498-8aae-f72cc9acdeef" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2343.788867] env[63024]: DEBUG nova.compute.manager [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2343.789078] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2343.789975] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-003dfad4-aa71-456e-80ee-8ce09b6e0536 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.798326] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2343.798580] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cbfafa40-9a17-4ef0-b5b5-2ecc4eaaab78 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2343.809554] env[63024]: DEBUG oslo_vmware.api [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2343.809554] env[63024]: value = "task-1952084" [ 2343.809554] env[63024]: _type = "Task" [ 2343.809554] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2343.819092] env[63024]: DEBUG oslo_vmware.api [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952084, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2344.319205] env[63024]: DEBUG oslo_vmware.api [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952084, 'name': PowerOffVM_Task, 'duration_secs': 0.169447} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2344.319653] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2344.319653] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2344.319896] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-698d2670-8e3c-4190-b3fb-1c5510d427da {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.349432] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2344.349644] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2344.349827] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Deleting the datastore file [datastore1] 9b3a4e94-a40b-4498-8aae-f72cc9acdeef {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2344.350164] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57d77635-221a-4a98-9f83-b43653fe9410 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2344.357362] env[63024]: DEBUG oslo_vmware.api [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for the task: (returnval){ [ 2344.357362] env[63024]: value = "task-1952086" [ 2344.357362] env[63024]: _type = "Task" [ 2344.357362] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2344.365490] env[63024]: DEBUG oslo_vmware.api [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952086, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2344.867731] env[63024]: DEBUG oslo_vmware.api [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Task: {'id': task-1952086, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111273} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2344.867998] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2344.868190] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2344.868374] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2344.868550] env[63024]: INFO nova.compute.manager [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Took 1.08 seconds to destroy the instance on the hypervisor. [ 2344.868807] env[63024]: DEBUG oslo.service.loopingcall [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2344.868997] env[63024]: DEBUG nova.compute.manager [-] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2344.869132] env[63024]: DEBUG nova.network.neutron [-] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2344.885043] env[63024]: DEBUG nova.network.neutron [-] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Instance cache missing network info. {{(pid=63024) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 2345.387684] env[63024]: DEBUG nova.network.neutron [-] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2345.890329] env[63024]: INFO nova.compute.manager [-] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Took 1.02 seconds to deallocate network for instance. 
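The records above cover the full terminate path for instance 9b3a4e94-a40b-4498-8aae-f72cc9acdeef: PowerOffVM_Task (task-1952084), UnregisterVM, DeleteDatastoreFile_Task on the [datastore1] instance directory (task-1952086), and finally network deallocation. As a rough, non-authoritative sketch of how such a sequence can be driven through oslo.vmware (not Nova's actual vmops/ds_util code), the snippet below strings the same vSphere calls together; `session`, `vm_ref`, `dc_ref` and `ds_path` are illustrative placeholders, and datacenter lookup plus error handling are omitted.

# Hedged sketch: replay the power-off / unregister / delete-file sequence seen
# in the log through an already-authenticated oslo_vmware.api.VMwareAPISession.
# `vm_ref`, `dc_ref` and `ds_path` are placeholders, not values from the log.
def destroy_vm(session, vm_ref, dc_ref, ds_path):
    # PowerOffVM_Task, as in task-1952084
    task = session.invoke_api(session.vim, "PowerOffVM_Task", vm_ref)
    session.wait_for_task(task)

    # UnregisterVM completes synchronously; no task is polled in the log
    session.invoke_api(session.vim, "UnregisterVM", vm_ref)

    # DeleteDatastoreFile_Task on the instance directory, as in task-1952086,
    # e.g. ds_path = "[datastore1] 9b3a4e94-a40b-4498-8aae-f72cc9acdeef"
    file_manager = session.vim.service_content.fileManager
    task = session.invoke_api(session.vim, "DeleteDatastoreFile_Task",
                              file_manager, name=ds_path, datacenter=dc_ref)
    session.wait_for_task(task)

The wait_for_task() calls are what surface in the log as the repeated "Task: {'id': task-..., ...} progress is N%" poll lines, and the object references such a caller would pass in are the morefs returned by the PropertyCollector.RetrievePropertiesEx invocations visible throughout the trace.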
[ 2346.396533] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2346.396921] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2346.397118] env[63024]: DEBUG nova.objects.instance [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Lazy-loading 'resources' on Instance uuid 9b3a4e94-a40b-4498-8aae-f72cc9acdeef {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2346.942424] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fb2abde-7c50-451b-a302-8e3e164cfae2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.952559] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6b87c5c-6fc6-43e5-bcd0-61169ca45f27 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2346.998810] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42a2f89-dd85-4bc1-adb0-594b27549b5a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.008707] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f4565b-a3a5-4224-86b3-72eca2ccfa0c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2347.027651] env[63024]: DEBUG nova.compute.provider_tree [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2347.531273] env[63024]: DEBUG nova.scheduler.client.report [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2348.036619] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 
tempest-ServerShowV257Test-1535112678-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.640s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2348.058997] env[63024]: INFO nova.scheduler.client.report [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Deleted allocations for instance 9b3a4e94-a40b-4498-8aae-f72cc9acdeef [ 2348.566620] env[63024]: DEBUG oslo_concurrency.lockutils [None req-f13d771d-dcbb-489a-9434-5e525efa1c5d tempest-ServerShowV257Test-1535112678 tempest-ServerShowV257Test-1535112678-project-member] Lock "9b3a4e94-a40b-4498-8aae-f72cc9acdeef" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.366s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2352.631112] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2352.631606] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2352.631606] env[63024]: INFO nova.compute.manager [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Shelving [ 2353.643484] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2353.643840] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1e93dbc2-41f7-4acc-8bcc-288f16911cee {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2353.651252] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2353.651252] env[63024]: value = "task-1952087" [ 2353.651252] env[63024]: _type = "Task" [ 2353.651252] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2353.658981] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952087, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2354.161020] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952087, 'name': PowerOffVM_Task, 'duration_secs': 0.166526} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2354.161307] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2354.162092] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f13c323e-7067-4e24-b727-e2c62228cb58 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.179928] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213aaadf-41ae-4424-8d84-f7f5ec8f6202 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.690118] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Creating Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 2354.690118] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-04446c21-8474-4869-8aab-4009a15ea63d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2354.698295] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2354.698295] env[63024]: value = "task-1952088" [ 2354.698295] env[63024]: _type = "Task" [ 2354.698295] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2354.706950] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952088, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2355.208407] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952088, 'name': CreateSnapshot_Task, 'duration_secs': 0.386052} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2355.208682] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Created Snapshot of the VM instance {{(pid=63024) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 2355.209404] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c0cae8-6911-413d-853a-2b15dc879555 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.727053] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Creating linked-clone VM from snapshot {{(pid=63024) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 2355.727417] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-c46700b6-36f8-42f2-828a-3e1219f6d491 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2355.736638] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2355.736638] env[63024]: value = "task-1952089" [ 2355.736638] env[63024]: _type = "Task" [ 2355.736638] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2355.745565] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952089, 'name': CloneVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2356.246441] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952089, 'name': CloneVM_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2356.746750] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952089, 'name': CloneVM_Task} progress is 100%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2357.247717] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952089, 'name': CloneVM_Task, 'duration_secs': 1.078647} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2357.247982] env[63024]: INFO nova.virt.vmwareapi.vmops [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Created linked-clone VM from snapshot [ 2357.248702] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-774f11f3-0531-421c-b160-956ceda9f5dc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.255884] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Uploading image fdbe0da7-8788-4622-a2dd-50aa8a0cccba {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 2357.276650] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 2357.276650] env[63024]: value = "vm-402295" [ 2357.276650] env[63024]: _type = "VirtualMachine" [ 2357.276650] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 2357.276952] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-1e3eb792-5201-4556-b094-94d77f116c73 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.284275] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lease: (returnval){ [ 2357.284275] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523badec-0611-d18f-8799-7e686d5c0fa0" [ 2357.284275] env[63024]: _type = "HttpNfcLease" [ 2357.284275] env[63024]: } obtained for exporting VM: (result){ [ 2357.284275] env[63024]: value = "vm-402295" [ 2357.284275] env[63024]: _type = "VirtualMachine" [ 2357.284275] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 2357.284504] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the lease: (returnval){ [ 2357.284504] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523badec-0611-d18f-8799-7e686d5c0fa0" [ 2357.284504] env[63024]: _type = "HttpNfcLease" [ 2357.284504] env[63024]: } to be ready. 
{{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2357.290722] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2357.290722] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523badec-0611-d18f-8799-7e686d5c0fa0" [ 2357.290722] env[63024]: _type = "HttpNfcLease" [ 2357.290722] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2357.792731] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2357.792731] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523badec-0611-d18f-8799-7e686d5c0fa0" [ 2357.792731] env[63024]: _type = "HttpNfcLease" [ 2357.792731] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2357.793132] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2357.793132] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]523badec-0611-d18f-8799-7e686d5c0fa0" [ 2357.793132] env[63024]: _type = "HttpNfcLease" [ 2357.793132] env[63024]: }. {{(pid=63024) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 2357.793748] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2951651b-b313-4803-979c-0313285ba079 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2357.800840] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5288e8de-afb1-2b51-b65a-b98941acca4a/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2357.801028] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5288e8de-afb1-2b51-b65a-b98941acca4a/disk-0.vmdk for reading. {{(pid=63024) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 2357.886100] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-af1b36cf-ce47-40f7-8434-b1a8d63fcedf {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.700583] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5288e8de-afb1-2b51-b65a-b98941acca4a/disk-0.vmdk. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2365.701547] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-563c7a50-68a9-45c8-93ca-1503a66fc98c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.707829] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5288e8de-afb1-2b51-b65a-b98941acca4a/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2365.708017] env[63024]: ERROR oslo_vmware.rw_handles [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5288e8de-afb1-2b51-b65a-b98941acca4a/disk-0.vmdk due to incomplete transfer. [ 2365.708237] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-3305b7a2-ef5a-49a4-b96e-432c36d8df36 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.714921] env[63024]: DEBUG oslo_vmware.rw_handles [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5288e8de-afb1-2b51-b65a-b98941acca4a/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 2365.715192] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Uploaded image fdbe0da7-8788-4622-a2dd-50aa8a0cccba to the Glance image server {{(pid=63024) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 2365.717428] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Destroying the VM {{(pid=63024) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 2365.717647] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a7bcaddd-3d78-4e97-9358-443c1d7fba55 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2365.723137] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2365.723137] env[63024]: value = "task-1952091" [ 2365.723137] env[63024]: _type = "Task" [ 2365.723137] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2365.730621] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952091, 'name': Destroy_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2366.232864] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952091, 'name': Destroy_Task, 'duration_secs': 0.425971} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2366.233135] env[63024]: INFO nova.virt.vmwareapi.vm_util [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Destroyed the VM [ 2366.233371] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Deleting Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 2366.233614] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dfaafbfa-3946-4afa-9312-5b2cf820b531 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2366.239725] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2366.239725] env[63024]: value = "task-1952092" [ 2366.239725] env[63024]: _type = "Task" [ 2366.239725] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2366.246868] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952092, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2366.749098] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952092, 'name': RemoveSnapshot_Task, 'duration_secs': 0.329894} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2366.749420] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Deleted Snapshot of the VM instance {{(pid=63024) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 2366.749666] env[63024]: DEBUG nova.compute.manager [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2366.750397] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9df22cd8-3d07-46b9-b055-45e3b760c0be {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.262029] env[63024]: INFO nova.compute.manager [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Shelve offloading [ 2367.765720] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2367.766068] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c9b30305-0137-4dc5-831a-a5f3823b3334 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.774525] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2367.774525] env[63024]: value = "task-1952093" [ 2367.774525] env[63024]: _type = "Task" [ 2367.774525] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2367.785072] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] VM already powered off {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 2367.785284] env[63024]: DEBUG nova.compute.manager [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2367.785958] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b620bd-08d7-4b37-8900-6f5c3dd7d643 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2367.791586] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2367.791754] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2367.791921] env[63024]: DEBUG nova.network.neutron [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2368.508493] env[63024]: DEBUG nova.network.neutron [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating instance_info_cache with network_info: [{"id": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "address": "fa:16:3e:16:2b:e6", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tapd26f8bfe-a2", "ovs_interfaceid": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2369.011243] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2369.219778] env[63024]: DEBUG nova.compute.manager [req-247970a8-ff94-4072-b846-57e006547f41 req-6293f831-aae8-4e79-9aca-746f9f4c1e43 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Received event network-vif-unplugged-d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2369.220014] env[63024]: DEBUG oslo_concurrency.lockutils [req-247970a8-ff94-4072-b846-57e006547f41 req-6293f831-aae8-4e79-9aca-746f9f4c1e43 service nova] Acquiring lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2369.220264] env[63024]: DEBUG oslo_concurrency.lockutils [req-247970a8-ff94-4072-b846-57e006547f41 req-6293f831-aae8-4e79-9aca-746f9f4c1e43 service nova] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2369.220453] env[63024]: DEBUG oslo_concurrency.lockutils [req-247970a8-ff94-4072-b846-57e006547f41 req-6293f831-aae8-4e79-9aca-746f9f4c1e43 service nova] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2369.220886] env[63024]: DEBUG nova.compute.manager [req-247970a8-ff94-4072-b846-57e006547f41 req-6293f831-aae8-4e79-9aca-746f9f4c1e43 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] No waiting events found dispatching network-vif-unplugged-d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 2369.220993] env[63024]: WARNING nova.compute.manager [req-247970a8-ff94-4072-b846-57e006547f41 req-6293f831-aae8-4e79-9aca-746f9f4c1e43 service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Received unexpected event network-vif-unplugged-d26f8bfe-a2b6-4905-9785-1c97e45252c0 for instance with vm_state shelved and task_state shelving_offloading. 
[ 2369.305842] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2369.306731] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2830373d-31fd-4911-937f-b541b92d992b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2369.314531] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2369.314741] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-444435bd-a47e-494a-b8a4-0947ba64dc76 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2369.440121] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2369.440356] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2369.440544] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleting the datastore file [datastore1] fc828ce0-b08d-41f4-afb5-ea8968bbf62e {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2369.440810] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d3633ea-24ed-4e2a-924d-eaa284912508 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2369.447769] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2369.447769] env[63024]: value = "task-1952095" [ 2369.447769] env[63024]: _type = "Task" [ 2369.447769] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2369.455719] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952095, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2369.957929] env[63024]: DEBUG oslo_vmware.api [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952095, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.132148} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2369.958227] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2369.958422] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2369.958604] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2369.983522] env[63024]: INFO nova.scheduler.client.report [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleted allocations for instance fc828ce0-b08d-41f4-afb5-ea8968bbf62e [ 2370.487967] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2370.488368] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2370.488502] env[63024]: DEBUG nova.objects.instance [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lazy-loading 'resources' on Instance uuid fc828ce0-b08d-41f4-afb5-ea8968bbf62e {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2370.991040] env[63024]: DEBUG nova.objects.instance [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lazy-loading 'numa_topology' on Instance uuid fc828ce0-b08d-41f4-afb5-ea8968bbf62e {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2371.255998] env[63024]: DEBUG nova.compute.manager 
[req-e9975071-cc11-40af-9abb-32b1054a77fb req-50c87429-45e3-4d62-b7ed-0da30829356b service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Received event network-changed-d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2371.256221] env[63024]: DEBUG nova.compute.manager [req-e9975071-cc11-40af-9abb-32b1054a77fb req-50c87429-45e3-4d62-b7ed-0da30829356b service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Refreshing instance network info cache due to event network-changed-d26f8bfe-a2b6-4905-9785-1c97e45252c0. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2371.256438] env[63024]: DEBUG oslo_concurrency.lockutils [req-e9975071-cc11-40af-9abb-32b1054a77fb req-50c87429-45e3-4d62-b7ed-0da30829356b service nova] Acquiring lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2371.256585] env[63024]: DEBUG oslo_concurrency.lockutils [req-e9975071-cc11-40af-9abb-32b1054a77fb req-50c87429-45e3-4d62-b7ed-0da30829356b service nova] Acquired lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2371.256751] env[63024]: DEBUG nova.network.neutron [req-e9975071-cc11-40af-9abb-32b1054a77fb req-50c87429-45e3-4d62-b7ed-0da30829356b service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Refreshing network info cache for port d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2371.432023] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2371.432023] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2371.432023] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 2371.432023] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 2371.493443] env[63024]: DEBUG nova.objects.base [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=63024) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 2371.508219] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8225b582-cafa-42eb-81d7-3b9ced32b984 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.517764] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cfc92d71-90df-4806-861b-49a2b3642945 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.547217] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e12e85e-28be-46ab-b1bc-e00109fc9881 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.554629] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6636b1f1-43f2-46fc-8b31-54f3525c3c92 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2371.567873] env[63024]: DEBUG nova.compute.provider_tree [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2371.974874] env[63024]: INFO nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating ports in neutron [ 2372.006962] env[63024]: DEBUG nova.network.neutron [req-e9975071-cc11-40af-9abb-32b1054a77fb req-50c87429-45e3-4d62-b7ed-0da30829356b service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updated VIF entry in instance network info cache for port d26f8bfe-a2b6-4905-9785-1c97e45252c0. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2372.007534] env[63024]: DEBUG nova.network.neutron [req-e9975071-cc11-40af-9abb-32b1054a77fb req-50c87429-45e3-4d62-b7ed-0da30829356b service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating instance_info_cache with network_info: [{"id": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "address": "fa:16:3e:16:2b:e6", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapd26f8bfe-a2", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2372.013505] env[63024]: INFO nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating port d26f8bfe-a2b6-4905-9785-1c97e45252c0 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 2372.070750] env[63024]: DEBUG nova.scheduler.client.report [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has 
not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2372.510489] env[63024]: DEBUG oslo_concurrency.lockutils [req-e9975071-cc11-40af-9abb-32b1054a77fb req-50c87429-45e3-4d62-b7ed-0da30829356b service nova] Releasing lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2372.575773] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.087s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2373.084726] env[63024]: DEBUG oslo_concurrency.lockutils [None req-cd9d8da5-6fe0-4be7-a893-545959494e22 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 20.453s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2373.258449] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2373.258726] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2373.258913] env[63024]: INFO nova.compute.manager [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Unshelving [ 2373.578111] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2373.578451] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquired lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 
2373.578451] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Forcefully refreshing network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2373.578537] env[63024]: DEBUG nova.objects.instance [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lazy-loading 'info_cache' on Instance uuid fc828ce0-b08d-41f4-afb5-ea8968bbf62e {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2374.287881] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2374.288180] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2374.288432] env[63024]: DEBUG nova.objects.instance [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lazy-loading 'pci_requests' on Instance uuid fc828ce0-b08d-41f4-afb5-ea8968bbf62e {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2374.792558] env[63024]: DEBUG nova.objects.instance [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lazy-loading 'numa_topology' on Instance uuid fc828ce0-b08d-41f4-afb5-ea8968bbf62e {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2375.284941] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating instance_info_cache with network_info: [{"id": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "address": "fa:16:3e:16:2b:e6", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd26f8bfe-a2", "ovs_interfaceid": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2375.294970] env[63024]: INFO nova.compute.claims [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2375.507280] env[63024]: DEBUG nova.compute.manager [req-4995e104-7fd9-43ff-b067-fdf2b1907ad7 req-03218895-0146-4742-b83c-fdd161191dfc service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Received event network-changed-d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2375.507495] env[63024]: DEBUG nova.compute.manager [req-4995e104-7fd9-43ff-b067-fdf2b1907ad7 req-03218895-0146-4742-b83c-fdd161191dfc service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Refreshing instance network info cache due to event network-changed-d26f8bfe-a2b6-4905-9785-1c97e45252c0. {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11466}} [ 2375.507692] env[63024]: DEBUG oslo_concurrency.lockutils [req-4995e104-7fd9-43ff-b067-fdf2b1907ad7 req-03218895-0146-4742-b83c-fdd161191dfc service nova] Acquiring lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2375.787579] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Releasing lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2375.787782] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updated the network info_cache for instance {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10329}} [ 2375.788056] env[63024]: DEBUG oslo_concurrency.lockutils [req-4995e104-7fd9-43ff-b067-fdf2b1907ad7 req-03218895-0146-4742-b83c-fdd161191dfc service nova] Acquired lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2375.788272] env[63024]: DEBUG nova.network.neutron [req-4995e104-7fd9-43ff-b067-fdf2b1907ad7 req-03218895-0146-4742-b83c-fdd161191dfc service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Refreshing network info cache for port d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 2375.789412] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2375.789608] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2375.789960] env[63024]: DEBUG oslo_service.periodic_task [None 
req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2375.790144] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2375.790293] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2375.790444] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2375.790572] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 2375.790716] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2376.293873] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2376.328149] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e5088f-80a0-45ac-8ca1-808c6e4c05c9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.335805] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0a2ca8-56b8-4dec-9fa4-20d1f0b72c9c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.367581] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bf4c0db-afe1-4d20-90a1-45b2310d47e3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.375039] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1cdb9b-6991-43c3-a6b1-e8bbcd64189b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2376.387826] env[63024]: DEBUG nova.compute.provider_tree [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2376.523654] env[63024]: DEBUG nova.network.neutron 
[req-4995e104-7fd9-43ff-b067-fdf2b1907ad7 req-03218895-0146-4742-b83c-fdd161191dfc service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updated VIF entry in instance network info cache for port d26f8bfe-a2b6-4905-9785-1c97e45252c0. {{(pid=63024) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 2376.524028] env[63024]: DEBUG nova.network.neutron [req-4995e104-7fd9-43ff-b067-fdf2b1907ad7 req-03218895-0146-4742-b83c-fdd161191dfc service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating instance_info_cache with network_info: [{"id": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "address": "fa:16:3e:16:2b:e6", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd26f8bfe-a2", "ovs_interfaceid": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2376.891200] env[63024]: DEBUG nova.scheduler.client.report [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2377.026296] env[63024]: DEBUG oslo_concurrency.lockutils [req-4995e104-7fd9-43ff-b067-fdf2b1907ad7 req-03218895-0146-4742-b83c-fdd161191dfc service nova] Releasing lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2377.395721] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.107s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2377.397945] env[63024]: DEBUG oslo_concurrency.lockutils [None 
req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.104s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2377.398142] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2377.398326] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2377.399205] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04759719-c6b6-4954-ad82-6fb3dade2066 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.407918] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347299a7-2ad9-45d5-9d08-51f88c578c53 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.422786] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1268d689-ae17-43ac-865f-7e5ff7ee0e72 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.426919] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2377.427105] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2377.427304] env[63024]: DEBUG nova.network.neutron [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Building network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 2377.431058] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f623aa02-7f04-4426-b269-7b1b85563ea0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2377.459446] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180896MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view 
/opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2377.459605] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2377.459781] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2378.128560] env[63024]: DEBUG nova.network.neutron [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating instance_info_cache with network_info: [{"id": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "address": "fa:16:3e:16:2b:e6", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd26f8bfe-a2", "ovs_interfaceid": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2378.479325] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance fc828ce0-b08d-41f4-afb5-ea8968bbf62e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2378.479572] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2378.479730] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2378.503364] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efbc2967-e25c-4df7-8c98-bc7d193aba8a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.510661] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37cee30d-b89f-49a9-b6fe-05c289beb0d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.540700] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9758e8-b61c-4b28-9cfa-739a63eb66fc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.547704] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f260b32c-4828-4c53-b1b6-be0bc767e68f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.560517] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2378.630705] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2378.657824] env[63024]: DEBUG nova.virt.hardware [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-12-22T10:49:43Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='305c2b68ec56d24f03f2d9623b7979c4',container_format='bare',created_at=2024-12-22T11:20:00Z,direct_url=<?>,disk_format='vmdk',id=fdbe0da7-8788-4622-a2dd-50aa8a0cccba,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-313324655-shelved',owner='dd0c44555e30414c83750b762e243dc1',properties=ImageMetaProps,protected=<?>,size=31669760,status='active',tags=<?>,updated_at=2024-12-22T11:20:14Z,virtual_size=<?>,visibility=<?>), allow 
threads: False {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2378.658069] env[63024]: DEBUG nova.virt.hardware [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Flavor limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2378.658241] env[63024]: DEBUG nova.virt.hardware [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Image limits 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2378.658422] env[63024]: DEBUG nova.virt.hardware [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Flavor pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2378.658571] env[63024]: DEBUG nova.virt.hardware [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Image pref 0:0:0 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2378.658721] env[63024]: DEBUG nova.virt.hardware [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=63024) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2378.658930] env[63024]: DEBUG nova.virt.hardware [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2378.659112] env[63024]: DEBUG nova.virt.hardware [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2378.659289] env[63024]: DEBUG nova.virt.hardware [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Got 1 possible topologies {{(pid=63024) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2378.659459] env[63024]: DEBUG nova.virt.hardware [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2378.659674] env[63024]: DEBUG nova.virt.hardware [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=63024) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 
2378.660454] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4facd16e-a0bb-4157-bffc-76406320ce29 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.667955] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808b811b-6e7d-4727-9ca3-e70354b99948 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.681103] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:16:2b:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '55c757ac-f8b2-466d-b634-07dbd100b312', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd26f8bfe-a2b6-4905-9785-1c97e45252c0', 'vif_model': 'vmxnet3'}] {{(pid=63024) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2378.688269] env[63024]: DEBUG oslo.service.loopingcall [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2378.688507] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Creating VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 2378.688714] env[63024]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3dc51146-3ffb-490d-8d48-17ecff5a5a03 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2378.707985] env[63024]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2378.707985] env[63024]: value = "task-1952096" [ 2378.707985] env[63024]: _type = "Task" [ 2378.707985] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2378.715163] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952096, 'name': CreateVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.064207] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2379.217695] env[63024]: DEBUG oslo_vmware.api [-] Task: {'id': task-1952096, 'name': CreateVM_Task, 'duration_secs': 0.470623} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2379.217865] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Created VM on the ESX host {{(pid=63024) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 2379.218557] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2379.218809] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2379.219096] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 2379.219347] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd4341a0-2fa2-431c-92ed-b9367ce9955a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.223535] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2379.223535] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52821f7e-4b02-a3e6-d6ee-e27873434112" [ 2379.223535] env[63024]: _type = "Task" [ 2379.223535] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2379.230709] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]52821f7e-4b02-a3e6-d6ee-e27873434112, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2379.569498] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2379.569868] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.110s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2379.733561] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2379.733781] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Processing image fdbe0da7-8788-4622-a2dd-50aa8a0cccba {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2379.734023] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba/fdbe0da7-8788-4622-a2dd-50aa8a0cccba.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2379.734177] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquired lock "[datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba/fdbe0da7-8788-4622-a2dd-50aa8a0cccba.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2379.734359] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2379.734609] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a454b1dc-9744-41d8-b561-e6213123a31f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.742744] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2379.742913] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None 
req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=63024) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 2379.743583] env[63024]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d634da06-3822-4b03-b986-51c30fcfc3f3 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2379.748152] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2379.748152] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5294d682-9bd5-ba14-d066-4543f70f5533" [ 2379.748152] env[63024]: _type = "Task" [ 2379.748152] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2379.756593] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5294d682-9bd5-ba14-d066-4543f70f5533, 'name': SearchDatastore_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2380.258348] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Preparing fetch location {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2380.258622] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Fetch image to [datastore1] OSTACK_IMG_f4568552-f77e-49a1-82b3-ca31d5b32f1b/OSTACK_IMG_f4568552-f77e-49a1-82b3-ca31d5b32f1b.vmdk {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2380.258803] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Downloading stream optimized image fdbe0da7-8788-4622-a2dd-50aa8a0cccba to [datastore1] OSTACK_IMG_f4568552-f77e-49a1-82b3-ca31d5b32f1b/OSTACK_IMG_f4568552-f77e-49a1-82b3-ca31d5b32f1b.vmdk on the data store datastore1 as vApp {{(pid=63024) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 2380.258970] env[63024]: DEBUG nova.virt.vmwareapi.images [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Downloading image file data fdbe0da7-8788-4622-a2dd-50aa8a0cccba to the ESX as VM named 'OSTACK_IMG_f4568552-f77e-49a1-82b3-ca31d5b32f1b' {{(pid=63024) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 2380.323979] env[63024]: DEBUG oslo_vmware.rw_handles [None 
req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 2380.323979] env[63024]: value = "resgroup-9" [ 2380.323979] env[63024]: _type = "ResourcePool" [ 2380.323979] env[63024]: }. {{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 2380.323979] env[63024]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-2fb97360-1f86-42d4-bd11-6cb55fae1dbc {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.341845] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lease: (returnval){ [ 2380.341845] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5224484c-bc71-e72b-9626-4fbbb7df3303" [ 2380.341845] env[63024]: _type = "HttpNfcLease" [ 2380.341845] env[63024]: } obtained for vApp import into resource pool (val){ [ 2380.341845] env[63024]: value = "resgroup-9" [ 2380.341845] env[63024]: _type = "ResourcePool" [ 2380.341845] env[63024]: }. {{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 2380.342175] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the lease: (returnval){ [ 2380.342175] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5224484c-bc71-e72b-9626-4fbbb7df3303" [ 2380.342175] env[63024]: _type = "HttpNfcLease" [ 2380.342175] env[63024]: } to be ready. {{(pid=63024) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 2380.347766] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2380.347766] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5224484c-bc71-e72b-9626-4fbbb7df3303" [ 2380.347766] env[63024]: _type = "HttpNfcLease" [ 2380.347766] env[63024]: } is initializing. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 2380.849769] env[63024]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 2380.849769] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5224484c-bc71-e72b-9626-4fbbb7df3303" [ 2380.849769] env[63024]: _type = "HttpNfcLease" [ 2380.849769] env[63024]: } is ready. {{(pid=63024) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 2380.850204] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 2380.850204] env[63024]: value = "session[52c773c4-1f91-085c-e0db-35fc9f46ade1]5224484c-bc71-e72b-9626-4fbbb7df3303" [ 2380.850204] env[63024]: _type = "HttpNfcLease" [ 2380.850204] env[63024]: }. 
{{(pid=63024) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 2380.850780] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56d894b-48e9-4eb5-9ac2-40ad99ab1f99 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2380.857548] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b18e9-ac90-76e2-7e43-d8ccfc510a95/disk-0.vmdk from lease info. {{(pid=63024) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 2380.857725] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b18e9-ac90-76e2-7e43-d8ccfc510a95/disk-0.vmdk. {{(pid=63024) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2380.926059] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-96c56b3f-6668-41c9-ada9-9d561960ad40 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.070162] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Completed reading data from the image iterator. {{(pid=63024) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2382.070547] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b18e9-ac90-76e2-7e43-d8ccfc510a95/disk-0.vmdk. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 2382.071453] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8af3117-5d5b-4e69-a281-fee3c48bb516 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.078268] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b18e9-ac90-76e2-7e43-d8ccfc510a95/disk-0.vmdk is in state: ready. {{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 2382.078451] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b18e9-ac90-76e2-7e43-d8ccfc510a95/disk-0.vmdk. 
{{(pid=63024) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 2382.078710] env[63024]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-67f9b6d6-d135-48e9-bf1c-c6c9a6aa1ce0 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.266948] env[63024]: DEBUG oslo_vmware.rw_handles [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/528b18e9-ac90-76e2-7e43-d8ccfc510a95/disk-0.vmdk. {{(pid=63024) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 2382.267205] env[63024]: INFO nova.virt.vmwareapi.images [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Downloaded image file data fdbe0da7-8788-4622-a2dd-50aa8a0cccba [ 2382.268052] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d70733-9348-492b-9b83-a6a955ffef8d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.283387] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30e43811-cfef-488e-86f0-8fbfbd491f46 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.314567] env[63024]: INFO nova.virt.vmwareapi.images [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] The imported VM was unregistered [ 2382.316887] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Caching image {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2382.317135] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Creating directory with path [datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2382.317442] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7e28785-bb74-4b11-8b6b-1d65dfd8d0d7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.328297] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Created directory with path [datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba {{(pid=63024) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2382.328499] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 
tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_f4568552-f77e-49a1-82b3-ca31d5b32f1b/OSTACK_IMG_f4568552-f77e-49a1-82b3-ca31d5b32f1b.vmdk to [datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba/fdbe0da7-8788-4622-a2dd-50aa8a0cccba.vmdk. {{(pid=63024) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 2382.328722] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-ceb9e6d7-4ac3-4f2a-8e5d-076680f21540 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2382.335198] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2382.335198] env[63024]: value = "task-1952099" [ 2382.335198] env[63024]: _type = "Task" [ 2382.335198] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2382.342644] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952099, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2382.846203] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952099, 'name': MoveVirtualDisk_Task} progress is 24%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2383.346453] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952099, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2383.849119] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952099, 'name': MoveVirtualDisk_Task} progress is 71%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2384.348233] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952099, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2384.849121] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952099, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.389242} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2384.849383] env[63024]: INFO nova.virt.vmwareapi.ds_util [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_f4568552-f77e-49a1-82b3-ca31d5b32f1b/OSTACK_IMG_f4568552-f77e-49a1-82b3-ca31d5b32f1b.vmdk to [datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba/fdbe0da7-8788-4622-a2dd-50aa8a0cccba.vmdk. [ 2384.849577] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Cleaning up location [datastore1] OSTACK_IMG_f4568552-f77e-49a1-82b3-ca31d5b32f1b {{(pid=63024) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 2384.849743] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_f4568552-f77e-49a1-82b3-ca31d5b32f1b {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2384.849988] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-65336205-d175-4a1e-99dd-ea4c3f907e4e {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2384.856312] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2384.856312] env[63024]: value = "task-1952100" [ 2384.856312] env[63024]: _type = "Task" [ 2384.856312] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2384.863384] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952100, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2385.367207] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952100, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.032456} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2385.367581] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2385.367752] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Releasing lock "[datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba/fdbe0da7-8788-4622-a2dd-50aa8a0cccba.vmdk" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2385.367890] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba/fdbe0da7-8788-4622-a2dd-50aa8a0cccba.vmdk to [datastore1] fc828ce0-b08d-41f4-afb5-ea8968bbf62e/fc828ce0-b08d-41f4-afb5-ea8968bbf62e.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 2385.368151] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8a07ae0-343c-4565-a4da-dd022fed244f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.374621] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2385.374621] env[63024]: value = "task-1952101" [ 2385.374621] env[63024]: _type = "Task" [ 2385.374621] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2385.382059] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952101, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2385.884606] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952101, 'name': CopyVirtualDisk_Task} progress is 24%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.385640] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952101, 'name': CopyVirtualDisk_Task} progress is 49%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.886422] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952101, 'name': CopyVirtualDisk_Task} progress is 71%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2387.386744] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952101, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2387.887642] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952101, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.099026} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2387.887908] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/fdbe0da7-8788-4622-a2dd-50aa8a0cccba/fdbe0da7-8788-4622-a2dd-50aa8a0cccba.vmdk to [datastore1] fc828ce0-b08d-41f4-afb5-ea8968bbf62e/fc828ce0-b08d-41f4-afb5-ea8968bbf62e.vmdk {{(pid=63024) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 2387.888714] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc600fca-7e04-40f4-8e65-179ffc45264d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.909952] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] fc828ce0-b08d-41f4-afb5-ea8968bbf62e/fc828ce0-b08d-41f4-afb5-ea8968bbf62e.vmdk or device None with type streamOptimized {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 2387.910200] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52e2ac38-128b-4c7d-9294-1dd82a8a03c7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2387.929199] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2387.929199] env[63024]: value = "task-1952102" [ 2387.929199] env[63024]: _type = "Task" [ 2387.929199] env[63024]: } to complete. 
{{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2387.936729] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952102, 'name': ReconfigVM_Task} progress is 5%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2388.439205] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952102, 'name': ReconfigVM_Task, 'duration_secs': 0.321163} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2388.439586] env[63024]: DEBUG nova.virt.vmwareapi.volumeops [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Reconfigured VM instance instance-0000007d to attach disk [datastore1] fc828ce0-b08d-41f4-afb5-ea8968bbf62e/fc828ce0-b08d-41f4-afb5-ea8968bbf62e.vmdk or device None with type streamOptimized {{(pid=63024) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 2388.440089] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-da0eda41-090e-4da2-aa09-55a73604183f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.446214] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2388.446214] env[63024]: value = "task-1952103" [ 2388.446214] env[63024]: _type = "Task" [ 2388.446214] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2388.453239] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952103, 'name': Rename_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2388.955375] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952103, 'name': Rename_Task, 'duration_secs': 0.136444} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2388.955646] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Powering on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 2388.955889] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-45814896-a9ea-49d5-9ff7-b7e07504aa15 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2388.961691] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2388.961691] env[63024]: value = "task-1952104" [ 2388.961691] env[63024]: _type = "Task" [ 2388.961691] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2388.968674] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952104, 'name': PowerOnVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2389.471018] env[63024]: DEBUG oslo_vmware.api [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952104, 'name': PowerOnVM_Task, 'duration_secs': 0.451808} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2389.471347] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Powered on the VM {{(pid=63024) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 2389.563042] env[63024]: DEBUG nova.compute.manager [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Checking state {{(pid=63024) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 2389.563953] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0f3068-8e2c-4196-b274-ceb9ade5b61b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2390.082394] env[63024]: DEBUG oslo_concurrency.lockutils [None req-5bb1f408-0661-49ce-8343-c363ff41ec04 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 16.824s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2410.417641] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2410.418032] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Cleaning up deleted instances with incomplete migration {{(pid=63024) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11596}} [ 2414.920120] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2415.416681] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager.update_available_resource {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2415.920808] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2415.921185] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2415.921224] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2415.921384] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=63024) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2415.922331] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b6deebb-2550-4f52-8960-b9cc492badd2 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.930830] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0bc30bc-553b-44eb-936b-eda08efa448a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.944639] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d476b5d0-6f4e-4fc0-9dbf-ddb906f26969 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.950720] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b14773-cc4a-4a4c-9e0b-5b3ede138c38 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2415.981020] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181127MB free_disk=171GB free_vcpus=48 pci_devices=None {{(pid=63024) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2415.981184] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2415.981375] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2417.005360] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Instance fc828ce0-b08d-41f4-afb5-ea8968bbf62e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=63024) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2417.005611] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2417.005704] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=63024) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2417.032017] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579e304f-e0cc-4fc1-b62b-493d17f0982d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.037817] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0a7cbd8-fffc-4a69-bebd-56dbb5d1d419 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.067216] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d243d96-d2e0-464a-bb4c-07adf89ef615 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.074089] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-571793d8-68a2-4b65-bed8-1811be6d771f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2417.086857] env[63024]: DEBUG nova.compute.provider_tree [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2417.591114] env[63024]: DEBUG nova.scheduler.client.report [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}} [ 2418.096741] env[63024]: DEBUG nova.compute.resource_tracker [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=63024) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2418.097349] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.116s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2419.097481] env[63024]: DEBUG oslo_service.periodic_task [None 
req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2419.097823] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2419.411893] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2420.416897] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2421.417193] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2421.417415] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Starting heal instance info cache {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10258}} [ 2421.417500] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Rebuilding the list of instances to heal {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10262}} [ 2421.947166] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquiring lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 2421.947334] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Acquired lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 2421.947483] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Forcefully refreshing network info cache for instance {{(pid=63024) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2061}} [ 2421.947631] env[63024]: DEBUG nova.objects.instance [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Lazy-loading 'info_cache' on Instance uuid fc828ce0-b08d-41f4-afb5-ea8968bbf62e {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 2423.668075] env[63024]: DEBUG nova.network.neutron [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating instance_info_cache with network_info: [{"id": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "address": "fa:16:3e:16:2b:e6", "network": {"id": "5503f5f5-9a63-4bb8-bc39-97863100c3df", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-1218611362-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": 
{"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.168", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dd0c44555e30414c83750b762e243dc1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "55c757ac-f8b2-466d-b634-07dbd100b312", "external-id": "nsx-vlan-transportzone-159", "segmentation_id": 159, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd26f8bfe-a2", "ovs_interfaceid": "d26f8bfe-a2b6-4905-9785-1c97e45252c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2424.171137] env[63024]: DEBUG oslo_concurrency.lockutils [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Releasing lock "refresh_cache-fc828ce0-b08d-41f4-afb5-ea8968bbf62e" {{(pid=63024) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 2424.171398] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updated the network info_cache for instance {{(pid=63024) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10329}} [ 2424.171585] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2424.171775] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2424.919840] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2425.424230] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2425.424392] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=63024) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10877}} [ 2425.424560] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2425.424937] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Getting list of instances from cluster (obj){ [ 2425.424937] env[63024]: value = "domain-c8" [ 2425.424937] env[63024]: _type = "ClusterComputeResource" [ 2425.424937] env[63024]: } {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 2425.426025] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691197a8-0518-4147-ab6b-3ed6e65fb24f {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2425.436121] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Got total of 1 instances {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 2426.401312] env[63024]: DEBUG oslo_concurrency.lockutils [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2426.401681] env[63024]: DEBUG oslo_concurrency.lockutils [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2426.401813] env[63024]: DEBUG oslo_concurrency.lockutils [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 2426.402015] env[63024]: DEBUG oslo_concurrency.lockutils [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 2426.402198] env[63024]: DEBUG oslo_concurrency.lockutils [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=63024) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 2426.405751] env[63024]: INFO nova.compute.manager [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Terminating instance [ 2426.910264] env[63024]: DEBUG nova.compute.manager [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Start destroying the instance on the hypervisor. {{(pid=63024) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 2426.910538] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Destroying instance {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 2426.911447] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5e1348-8150-4b2f-8fbf-edff1afec96a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.919804] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Powering off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 2426.920043] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95dcfa8e-80f4-438f-9e83-adcdfcb35b1a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2426.926812] env[63024]: DEBUG oslo_vmware.api [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2426.926812] env[63024]: value = "task-1952105" [ 2426.926812] env[63024]: _type = "Task" [ 2426.926812] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2426.934358] env[63024]: DEBUG oslo_vmware.api [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952105, 'name': PowerOffVM_Task} progress is 0%. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2427.436028] env[63024]: DEBUG oslo_vmware.api [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952105, 'name': PowerOffVM_Task, 'duration_secs': 0.200716} completed successfully. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2427.436331] env[63024]: DEBUG nova.virt.vmwareapi.vm_util [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Powered off the VM {{(pid=63024) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 2427.436493] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Unregistering the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 2427.436740] env[63024]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-04040e82-64b6-4ba3-ba03-707afcee8889 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.616878] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Unregistered the VM {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 2427.617151] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Deleting contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 2427.617350] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleting the datastore file [datastore1] fc828ce0-b08d-41f4-afb5-ea8968bbf62e {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2427.617627] env[63024]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57f26966-b9d4-4c36-a822-073c0ae14841 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2427.624599] env[63024]: DEBUG oslo_vmware.api [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for the task: (returnval){ [ 2427.624599] env[63024]: value = "task-1952107" [ 2427.624599] env[63024]: _type = "Task" [ 2427.624599] env[63024]: } to complete. {{(pid=63024) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2427.631825] env[63024]: DEBUG oslo_vmware.api [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952107, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2428.134023] env[63024]: DEBUG oslo_vmware.api [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Task: {'id': task-1952107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142498} completed successfully. {{(pid=63024) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2428.134296] env[63024]: DEBUG nova.virt.vmwareapi.ds_util [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleted the datastore file {{(pid=63024) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2428.134481] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Deleted contents of the VM from datastore datastore1 {{(pid=63024) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 2428.134656] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Instance destroyed {{(pid=63024) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 2428.134828] env[63024]: INFO nova.compute.manager [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Took 1.22 seconds to destroy the instance on the hypervisor. [ 2428.135079] env[63024]: DEBUG oslo.service.loopingcall [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=63024) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2428.135272] env[63024]: DEBUG nova.compute.manager [-] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Deallocating network for instance {{(pid=63024) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 2428.135367] env[63024]: DEBUG nova.network.neutron [-] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] deallocate_for_instance() {{(pid=63024) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 2428.625521] env[63024]: DEBUG nova.compute.manager [req-a9e3e55d-2eab-46e3-85c7-4482ba08e075 req-24e9b1ed-22b3-44e6-a175-5f76b9020fcb service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Received event network-vif-deleted-d26f8bfe-a2b6-4905-9785-1c97e45252c0 {{(pid=63024) external_instance_event /opt/stack/nova/nova/compute/manager.py:11461}} [ 2428.625814] env[63024]: INFO nova.compute.manager [req-a9e3e55d-2eab-46e3-85c7-4482ba08e075 req-24e9b1ed-22b3-44e6-a175-5f76b9020fcb service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Neutron deleted interface d26f8bfe-a2b6-4905-9785-1c97e45252c0; detaching it from the instance and deleting it from the info cache [ 2428.625933] env[63024]: DEBUG nova.network.neutron [req-a9e3e55d-2eab-46e3-85c7-4482ba08e075 req-24e9b1ed-22b3-44e6-a175-5f76b9020fcb service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2429.106027] env[63024]: DEBUG nova.network.neutron [-] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Updating instance_info_cache with network_info: [] {{(pid=63024) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2429.128807] env[63024]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3a4aed67-347f-4c0d-b9c5-6e1b3a0f4db7 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.138332] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85a2e8fd-71c4-4f41-88b2-b8d913d1783b {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2429.160952] env[63024]: DEBUG nova.compute.manager [req-a9e3e55d-2eab-46e3-85c7-4482ba08e075 req-24e9b1ed-22b3-44e6-a175-5f76b9020fcb service nova] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Detach interface failed, port_id=d26f8bfe-a2b6-4905-9785-1c97e45252c0, reason: Instance fc828ce0-b08d-41f4-afb5-ea8968bbf62e could not be found. {{(pid=63024) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11295}} [ 2429.608554] env[63024]: INFO nova.compute.manager [-] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Took 1.47 seconds to deallocate network for instance. 
[ 2430.115203] env[63024]: DEBUG oslo_concurrency.lockutils [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}}
[ 2430.115517] env[63024]: DEBUG oslo_concurrency.lockutils [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}}
[ 2430.115700] env[63024]: DEBUG nova.objects.instance [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lazy-loading 'resources' on Instance uuid fc828ce0-b08d-41f4-afb5-ea8968bbf62e {{(pid=63024) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}}
[ 2430.651050] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0556507-70f9-44e5-93e5-7d2fabf0fb8c {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2430.658674] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f36924-d782-44f5-99fb-3b9e7d65881d {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2430.687748] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b82862-b900-4aae-bc84-53ad5afdd0ba {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2430.694237] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836f9931-918a-4fbc-9371-2c71617674b9 {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2430.706549] env[63024]: DEBUG nova.compute.provider_tree [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed in ProviderTree for provider: 89dfa68a-133e-436f-a9f1-86051f9fb96b {{(pid=63024) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}}
[ 2431.210211] env[63024]: DEBUG nova.scheduler.client.report [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Inventory has not changed for provider 89dfa68a-133e-436f-a9f1-86051f9fb96b based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 171, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=63024) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:955}}
[ 2431.716067] env[63024]: DEBUG oslo_concurrency.lockutils [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.600s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2431.734622] env[63024]: INFO nova.scheduler.client.report [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Deleted allocations for instance fc828ce0-b08d-41f4-afb5-ea8968bbf62e
[ 2432.242844] env[63024]: DEBUG oslo_concurrency.lockutils [None req-556d36e6-4e88-4a12-baaf-1500a77bfc42 tempest-AttachVolumeShelveTestJSON-1409740924 tempest-AttachVolumeShelveTestJSON-1409740924-project-member] Lock "fc828ce0-b08d-41f4-afb5-ea8968bbf62e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.841s {{(pid=63024) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
[ 2437.417917] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2437.418209] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Cleaning up deleted instances {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11558}}
[ 2437.926463] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] There are 19 instances to clean {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11567}}
[ 2437.926607] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 9b3a4e94-a40b-4498-8aae-f72cc9acdeef] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2438.430469] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: fc828ce0-b08d-41f4-afb5-ea8968bbf62e] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2438.934390] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 2e59f840-26bf-4192-b1ee-3645e9a64d1a] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2439.437922] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: e79d7141-00e5-40c9-a88f-244a3ae685d8] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2439.941376] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 1013a279-f79d-467e-a37e-7e66f77db625] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2440.445201] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 6917758b-4b68-4a5a-b7e5-b2ffdade19d7] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2440.948437] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 7d78b891-34c0-46dd-8b0d-ce80517232e1] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2441.451663] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 233d087b-923e-46d4-a47f-b024583ce0f8] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2441.955764] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 8901e234-22a9-4523-8658-411aa19e01e0] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2442.459476] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 5919cc21-67b8-47d4-9909-bc972b42914d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2442.962524] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 4c39a074-d380-46a3-b1cc-81d72034b743] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2443.466297] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 14bafeba-9f5b-4488-b29c-38939973deb9] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2443.969736] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 94893f45-fb96-463b-82a9-e2fd884b81f8] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2444.472995] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: a483e6b5-a192-4cfe-be36-1ce0667f5697] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2444.976572] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: fa326fe2-c00e-4379-954a-9b3275328abc] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2445.480698] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: cb038d54-b785-4930-b8a5-b309c5f4b58d] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2445.983918] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 5192ad93-a4e9-4aa0-983d-186ab17360f0] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2446.487330] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: 0d253199-adf8-45c0-a6bf-b11c12b08688] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2446.990661] env[63024]: DEBUG nova.compute.manager [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] [instance: e8ad74ce-7862-4574-98e7-14bc54bd5d6c] Instance has had 0 of 5 cleanup attempts {{(pid=63024) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11571}}
[ 2457.121107] env[63024]: DEBUG oslo_service.periodic_task [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Running periodic task ComputeManager._sync_power_states {{(pid=63024) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}
[ 2457.623714] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Getting list of instances from cluster (obj){
[ 2457.623714] env[63024]: value = "domain-c8"
[ 2457.623714] env[63024]: _type = "ClusterComputeResource"
[ 2457.623714] env[63024]: } {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}}
[ 2457.624844] env[63024]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b69e635-716b-47ee-b5c2-a4e7c8b7d43a {{(pid=63024) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 2457.633596] env[63024]: DEBUG nova.virt.vmwareapi.vmops [None req-206080d2-197e-4117-9dd6-90cbc745d97a None None] Got total of 0 instances {{(pid=63024) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}}